You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2015/04/15 13:09:53 UTC
hadoop git commit: HDFS-7349. Support DFS command for the EC encoding
(Contributed by Vinayakumar B)
Repository: hadoop
Updated Branches:
refs/heads/HDFS-7285 91e7bf81d -> 295ccbca7
HDFS-7349. Support DFS command for the EC encoding (Contributed by Vinayakumar B)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/295ccbca
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/295ccbca
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/295ccbca
Branch: refs/heads/HDFS-7285
Commit: 295ccbca74a66d2dab89dd704908deb7071298e9
Parents: 91e7bf8
Author: Vinayakumar B <vi...@apache.org>
Authored: Wed Apr 15 16:38:22 2015 +0530
Committer: Vinayakumar B <vi...@apache.org>
Committed: Wed Apr 15 16:38:22 2015 +0530
----------------------------------------------------------------------
.../main/java/org/apache/hadoop/fs/FsShell.java | 8 +-
.../hadoop-hdfs/CHANGES-HDFS-EC-7285.txt | 4 +-
.../hadoop-hdfs/src/main/bin/hdfs | 5 +
.../java/org/apache/hadoop/hdfs/DFSClient.java | 18 ++
.../hadoop/hdfs/DistributedFileSystem.java | 32 +++
.../hadoop/hdfs/protocol/ClientProtocol.java | 9 +
.../apache/hadoop/hdfs/protocol/ECZoneInfo.java | 56 +++++
...tNamenodeProtocolServerSideTranslatorPB.java | 18 ++
.../ClientNamenodeProtocolTranslatorPB.java | 19 ++
.../apache/hadoop/hdfs/protocolPB/PBHelper.java | 12 ++
.../namenode/ErasureCodingZoneManager.java | 11 +-
.../hdfs/server/namenode/FSDirectory.java | 10 +
.../hdfs/server/namenode/FSNamesystem.java | 24 +++
.../hdfs/server/namenode/NameNodeRpcServer.java | 7 +
.../hadoop/hdfs/tools/erasurecode/ECCli.java | 48 +++++
.../hdfs/tools/erasurecode/ECCommand.java | 209 +++++++++++++++++++
.../src/main/proto/ClientNamenodeProtocol.proto | 2 +
.../src/main/proto/erasurecoding.proto | 15 ++
18 files changed, 502 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index db73f6d..f873a01 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -111,6 +111,10 @@ public class FsShell extends Configured implements Tool {
return getTrash().getCurrentTrashDir();
}
+ protected String getUsagePrefix() {
+ return usagePrefix;
+ }
+
// NOTE: Usage/Help are inner classes to allow access to outer methods
// that access commandFactory
@@ -194,7 +198,7 @@ public class FsShell extends Configured implements Tool {
}
} else {
// display help or usage for all commands
- out.println(usagePrefix);
+ out.println(getUsagePrefix());
// display list of short usages
ArrayList<Command> instances = new ArrayList<Command>();
@@ -218,7 +222,7 @@ public class FsShell extends Configured implements Tool {
}
private void printInstanceUsage(PrintStream out, Command instance) {
- out.println(usagePrefix + " " + instance.getUsage());
+ out.println(getUsagePrefix() + " " + instance.getUsage());
}
private void printInstanceHelp(PrintStream out, Command instance) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
index 9fdac98..b9fc6fa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
@@ -79,4 +79,6 @@
operation fails. (Rakesh R via Zhe Zhang)
HDFS-8123. Erasure Coding: Better to move EC related proto messages to a
- separate erasurecoding proto file (Rakesh R via vinayakumarb)
\ No newline at end of file
+ separate erasurecoding proto file (Rakesh R via vinayakumarb)
+
+ HDFS-7349. Support DFS command for the EC encoding (vinayakumarb)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index f464261..84c79b8 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -134,6 +134,11 @@ case ${COMMAND} in
hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
;;
+ erasurecode)
+ CLASS=org.apache.hadoop.hdfs.tools.erasurecode.ECCli
+ hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+ HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+ ;;
fetchdt)
CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
;;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index 7ff9073..d993ab3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -118,6 +118,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECInfo;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.EncryptionZoneIterator;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
@@ -3309,4 +3310,21 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
}
return scope;
}
+
+ /**
+ * Get the erasure coding zone information for the specified path
+ *
+ * @param src path to get the information for
+ * @return Returns the zone information if path is in EC Zone, null otherwise
+ * @throws IOException
+ */
+ public ECZoneInfo getErasureCodingZoneInfo(String src) throws IOException {
+ checkOpen();
+ try {
+ return namenode.getErasureCodingZoneInfo(src);
+ } catch (RemoteException re) {
+ throw re.unwrapRemoteException(FileNotFoundException.class,
+ AccessControlException.class, UnresolvedPathException.class);
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index 160aae3..0f4d2ec 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -72,6 +72,7 @@ import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
@@ -2250,4 +2251,35 @@ public class DistributedFileSystem extends FileSystem {
}
}.resolve(this, absF);
}
+
+ /**
+ * Get ErasureCoding zone information for the specified path
+ *
+ * @param path
+ * @return Returns the zone information if path is in EC zone, null otherwise
+ * @throws IOException
+ */
+ public ECZoneInfo getErasureCodingZoneInfo(final Path path)
+ throws IOException {
+ Path absF = fixRelativePart(path);
+ return new FileSystemLinkResolver<ECZoneInfo>() {
+ @Override
+ public ECZoneInfo doCall(final Path p) throws IOException,
+ UnresolvedLinkException {
+ return dfs.getErasureCodingZoneInfo(getPathName(p));
+ }
+
+ @Override
+ public ECZoneInfo next(final FileSystem fs, final Path p)
+ throws IOException {
+ if (fs instanceof DistributedFileSystem) {
+ DistributedFileSystem myDfs = (DistributedFileSystem) fs;
+ return myDfs.getErasureCodingZoneInfo(p);
+ }
+ throw new UnsupportedOperationException(
+ "Cannot getErasureCodingZoneInfo through a symlink to a "
+ + "non-DistributedFileSystem: " + path + " -> " + p);
+ }
+ }.resolve(this, absF);
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index 0c04ca9..c257cc1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -1484,4 +1484,13 @@ public interface ClientProtocol {
*/
@Idempotent
public ECSchema[] getECSchemas() throws IOException;
+
+ /**
+ * Get the information about the EC zone for the path
+ *
+ * @param src path to get the info for
+ * @throws IOException
+ */
+ @Idempotent
+ public ECZoneInfo getErasureCodingZoneInfo(String src) throws IOException;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ECZoneInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ECZoneInfo.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ECZoneInfo.java
new file mode 100644
index 0000000..ecfb92e
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ECZoneInfo.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdfs.protocol;
+
+import org.apache.hadoop.io.erasurecode.ECSchema;
+
+/**
+ * Information about the EC Zone at the specified path.
+ */
+public class ECZoneInfo {
+
+ private String dir;
+ private ECSchema schema;
+
+ public ECZoneInfo(String dir, ECSchema schema) {
+ this.dir = dir;
+ this.schema = schema;
+ }
+
+ /**
+ * Get directory of the EC zone.
+ *
+ * @return
+ */
+ public String getDir() {
+ return dir;
+ }
+
+ /**
+ * Get the schema for the EC Zone
+ *
+ * @return
+ */
+ public ECSchema getSchema() {
+ return schema;
+ }
+
+ @Override
+ public String toString() {
+ return "Dir: " + getDir() + ", Schema: " + schema;
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
index e3073ae..6bee07f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECInfo;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
@@ -200,6 +201,8 @@ import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.ListEncryptio
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.ListEncryptionZonesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECSchemasRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECSchemasResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECZoneInfoRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECZoneInfoResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingInfoRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingInfoResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CreateErasureCodingZoneRequestProto;
@@ -1551,4 +1554,19 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
throw new ServiceException(e);
}
}
+
+ @Override
+ public GetECZoneInfoResponseProto getErasureCodingZoneInfo(RpcController controller,
+ GetECZoneInfoRequestProto request) throws ServiceException {
+ try {
+ ECZoneInfo ecZoneInfo = server.getErasureCodingZoneInfo(request.getSrc());
+ GetECZoneInfoResponseProto.Builder builder = GetECZoneInfoResponseProto.newBuilder();
+ if (ecZoneInfo != null) {
+ builder.setECZoneInfo(PBHelper.convertECZoneInfo(ecZoneInfo));
+ }
+ return builder.build();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
index 44d1258..6c1c971 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECInfo;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
@@ -167,6 +168,8 @@ import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.GetEZForPathR
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.ListEncryptionZonesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECSchemasRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECSchemasResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECZoneInfoRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECZoneInfoResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingInfoRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingInfoResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CreateErasureCodingZoneRequestProto;
@@ -1576,4 +1579,20 @@ public class ClientNamenodeProtocolTranslatorPB implements
throw ProtobufHelper.getRemoteException(e);
}
}
+
+ @Override
+ public ECZoneInfo getErasureCodingZoneInfo(String src) throws IOException {
+ GetECZoneInfoRequestProto req = GetECZoneInfoRequestProto.newBuilder()
+ .setSrc(src).build();
+ try {
+ GetECZoneInfoResponseProto response = rpcProxy.getErasureCodingZoneInfo(
+ null, req);
+ if (response.hasECZoneInfo()) {
+ return PBHelper.convertECZoneInfo(response.getECZoneInfo());
+ }
+ return null;
+ } catch (ServiceException e) {
+ throw ProtobufHelper.getRemoteException(e);
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index 1282ade..33b6a2a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
@@ -76,6 +76,7 @@ import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.fs.FileEncryptionInfo;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.FsPermissionExtension;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.RollingUpgradeAction;
@@ -133,6 +134,7 @@ import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportC
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.ECInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.ECSchemaOptionEntryProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.ECSchemaProto;
+import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.ECZoneInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
@@ -3139,4 +3141,14 @@ public class PBHelper {
}
return builder.build();
}
+
+ public static ECZoneInfoProto convertECZoneInfo(ECZoneInfo ecZoneInfo) {
+ return ECZoneInfoProto.newBuilder().setDir(ecZoneInfo.getDir())
+ .setSchema(convertECSchema(ecZoneInfo.getSchema())).build();
+ }
+
+ public static ECZoneInfo convertECZoneInfo(ECZoneInfoProto ecZoneInfoProto) {
+ return new ECZoneInfo(ecZoneInfoProto.getDir(),
+ convertECSchema(ecZoneInfoProto.getSchema()));
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ErasureCodingZoneManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ErasureCodingZoneManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ErasureCodingZoneManager.java
index 5320c1c..0a84083 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ErasureCodingZoneManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ErasureCodingZoneManager.java
@@ -22,6 +22,7 @@ import com.google.common.collect.Lists;
import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.hdfs.XAttrHelper;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.ECSchemaProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;
import org.apache.hadoop.io.erasurecode.ECSchema;
@@ -57,7 +58,12 @@ public class ErasureCodingZoneManager {
return getECSchema(iip) != null;
}
- ECSchema getECSchema(INodesInPath iip) throws IOException{
+ ECSchema getECSchema(INodesInPath iip) throws IOException {
+ ECZoneInfo ecZoneInfo = getECZoneInfo(iip);
+ return ecZoneInfo == null ? null : ecZoneInfo.getSchema();
+ }
+
+ ECZoneInfo getECZoneInfo(INodesInPath iip) throws IOException {
assert dir.hasReadLock();
Preconditions.checkNotNull(iip);
List<INode> inodes = iip.getReadOnlyINodes();
@@ -80,7 +86,8 @@ public class ErasureCodingZoneManager {
if (XATTR_ERASURECODING_ZONE.equals(XAttrHelper.getPrefixName(xAttr))) {
ECSchemaProto ecSchemaProto;
ecSchemaProto = ECSchemaProto.parseFrom(xAttr.getValue());
- return PBHelper.convertECSchema(ecSchemaProto);
+ ECSchema schema = PBHelper.convertECSchema(ecSchemaProto);
+ return new ECZoneInfo(inode.getFullPathName(), schema);
}
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
index 188425e..113203a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hdfs.XAttrHelper;
import org.apache.hadoop.hdfs.protocol.AclException;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.FSLimitException.MaxDirectoryItemsExceededException;
import org.apache.hadoop.hdfs.protocol.FSLimitException.PathComponentTooLongException;
@@ -1424,6 +1425,15 @@ public class FSDirectory implements Closeable {
}
}
+ ECZoneInfo getECZoneInfo(INodesInPath iip) throws IOException {
+ readLock();
+ try {
+ return ecZoneManager.getECZoneInfo(iip);
+ } finally {
+ readUnlock();
+ }
+ }
+
static INode resolveLastINode(INodesInPath iip) throws FileNotFoundException {
INode inode = iip.getLastINode();
if (inode == null) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 1a9c529..715ba34 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -181,6 +181,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECInfo;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -8179,6 +8180,29 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
}
/**
+ * Get the erasure coding zone information for specified path
+ */
+ ECZoneInfo getErasureCodingZoneInfo(String src) throws AccessControlException,
+ UnresolvedLinkException, IOException {
+ checkOperation(OperationCategory.READ);
+ final byte[][] pathComponents = FSDirectory
+ .getPathComponentsForReservedPath(src);
+ final FSPermissionChecker pc = getPermissionChecker();
+ readLock();
+ try {
+ checkOperation(OperationCategory.READ);
+ src = dir.resolvePath(pc, src, pathComponents);
+ final INodesInPath iip = dir.getINodesInPath(src, true);
+ if (isPermissionEnabled) {
+ dir.checkPathAccess(pc, iip, FsAction.READ);
+ }
+ return dir.getECZoneInfo(iip);
+ } finally {
+ readUnlock();
+ }
+ }
+
+ /**
* Get available ECSchemas
*/
ECSchema[] getECSchemas() throws IOException {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
index 5e01c77..06aee59 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
@@ -85,6 +85,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECInfo;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.FSLimitException;
@@ -2056,4 +2057,10 @@ class NameNodeRpcServer implements NamenodeProtocols {
checkNNStartup();
return namesystem.getECSchemas();
}
+
+ @Override // ClientProtocol
+ public ECZoneInfo getErasureCodingZoneInfo(String src) throws IOException {
+ checkNNStartup();
+ return namesystem.getErasureCodingZoneInfo(src);
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCli.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCli.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCli.java
new file mode 100644
index 0000000..4ed9d0a
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCli.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdfs.tools.erasurecode;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.shell.CommandFactory;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * CLI for the erasure code encoding operations.
+ */
+public class ECCli extends FsShell {
+
+ private final static String usagePrefix =
+ "Usage: hdfs erasurecode [generic options]";
+
+ @Override
+ protected String getUsagePrefix() {
+ return usagePrefix;
+ }
+
+ @Override
+ protected void registerCommands(CommandFactory factory) {
+ factory.registerCommands(ECCommand.class);
+ }
+
+ public static void main(String[] args) throws Exception {
+ Configuration conf = new HdfsConfiguration();
+ int res = ToolRunner.run(conf, new ECCli(), args);
+ System.exit(res);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java
new file mode 100644
index 0000000..84c2275
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java
@@ -0,0 +1,209 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdfs.tools.erasurecode;
+
+import java.io.IOException;
+import java.util.LinkedList;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.shell.Command;
+import org.apache.hadoop.fs.shell.CommandFactory;
+import org.apache.hadoop.fs.shell.PathData;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.ECZoneInfo;
+import org.apache.hadoop.hdfs.server.namenode.UnsupportedActionException;
+import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * Erasure Coding CLI commands
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public abstract class ECCommand extends Command {
+
+ public static void registerCommands(CommandFactory factory) {
+ // Register all commands of Erasure CLI, with a '-' at the beginning in name
+ // of the command.
+ factory.addClass(CreateECZoneCommand.class, "-" + CreateECZoneCommand.NAME);
+ factory.addClass(GetECZoneInfoCommand.class, "-"
+ + GetECZoneInfoCommand.NAME);
+ factory.addClass(ListECSchemas.class, "-" + ListECSchemas.NAME);
+ }
+
+ @Override
+ public String getCommandName() {
+ return getName();
+ }
+
+ @Override
+ protected void run(Path path) throws IOException {
+ throw new RuntimeException("Not suppose to get here");
+ }
+
+ @Deprecated
+ @Override
+ public int runAll() {
+ return run(args);
+ }
+
+ @Override
+ protected void processPath(PathData item) throws IOException {
+ if (!(item.fs instanceof DistributedFileSystem)) {
+ throw new UnsupportedActionException(
+ "Erasure commands are only supported for the HDFS paths");
+ }
+ }
+
+ /**
+ * Create EC encoding zone command. Zones are created to use specific EC
+ * encoding schema, other than default while encoding the files under some
+ * specific directory.
+ */
+ static class CreateECZoneCommand extends ECCommand {
+ public static final String NAME = "createZone";
+ public static final String USAGE = "[-s <schemaName>] <path>";
+ public static final String DESCRIPTION =
+ "Create a zone to encode files using a specified schema\n"
+ + "Options :\n"
+ + " -s <schemaName> : EC schema name to encode files. "
+ + "If not passed default schema will be used\n"
+ + " <path> : Path to an empty directory. Under this directory "
+ + "files will be encoded using specified schema";
+ private String schemaName;
+ private ECSchema schema = null;
+
+ @Override
+ protected void processOptions(LinkedList<String> args) throws IOException {
+ schemaName = StringUtils.popOptionWithArgument("-s", args);
+ if (args.isEmpty()) {
+ throw new HadoopIllegalArgumentException("<path> is missing");
+ }
+ if (args.size() > 1) {
+ throw new HadoopIllegalArgumentException("Too many arguments");
+ }
+ }
+
+ @Override
+ protected void processPath(PathData item) throws IOException {
+ super.processPath(item);
+ DistributedFileSystem dfs = (DistributedFileSystem) item.fs;
+ try {
+ if (schemaName != null) {
+ ECSchema[] ecSchemas = dfs.getClient().getECSchemas();
+ for (ECSchema ecSchema : ecSchemas) {
+ if (schemaName.equals(ecSchema.getSchemaName())) {
+ schema = ecSchema;
+ break;
+ }
+ }
+ if (schema == null) {
+ StringBuilder sb = new StringBuilder();
+ sb.append("Schema '");
+ sb.append(schemaName);
+ sb.append("' does not match any of the supported schemas.");
+ sb.append("Please select any one of [");
+ for (ECSchema ecSchema : ecSchemas) {
+ sb.append(ecSchema.getSchemaName());
+ sb.append(", ");
+ }
+ throw new HadoopIllegalArgumentException(sb.toString());
+ }
+ }
+ dfs.createErasureCodingZone(item.path, schema);
+ out.println("EC Zone created successfully at " + item.path);
+ } catch (IOException e) {
+ throw new IOException("Unable to create EC zone for the path "
+ + item.path, e);
+ }
+ }
+ }
+
+ /**
+ * Get the information about the zone
+ */
+ static class GetECZoneInfoCommand extends ECCommand {
+ public static final String NAME = "getZoneInfo";
+ public static final String USAGE = "<path>";
+ public static final String DESCRIPTION =
+ "Get information about the EC zone at specified path\n";
+
+ @Override
+ protected void processOptions(LinkedList<String> args) throws IOException {
+ if (args.isEmpty()) {
+ throw new HadoopIllegalArgumentException("<path> is missing");
+ }
+ if (args.size() > 1) {
+ throw new HadoopIllegalArgumentException("Too many arguments");
+ }
+ }
+
+ @Override
+ protected void processPath(PathData item) throws IOException {
+ super.processPath(item);
+ DistributedFileSystem dfs = (DistributedFileSystem) item.fs;
+ try {
+ ECZoneInfo ecZoneInfo = dfs.getErasureCodingZoneInfo(item.path);
+ out.println(ecZoneInfo.toString());
+ } catch (IOException e) {
+ throw new IOException("Unable to create EC zone for the path "
+ + item.path, e);
+ }
+ }
+ }
+
+ /**
+ * List all supported EC Schemas
+ */
+ static class ListECSchemas extends ECCommand {
+ public static final String NAME = "listSchemas";
+ public static final String USAGE = "";
+ public static final String DESCRIPTION =
+ "Get the list of ECSchemas supported\n";
+
+ @Override
+ protected void processOptions(LinkedList<String> args) throws IOException {
+ if (!args.isEmpty()) {
+ throw new HadoopIllegalArgumentException("Too many parameters");
+ }
+
+ FileSystem fs = FileSystem.get(getConf());
+ if (fs instanceof DistributedFileSystem == false) {
+ throw new UnsupportedActionException(
+ "Erasure commands are only supported for the HDFS");
+ }
+ DistributedFileSystem dfs = (DistributedFileSystem) fs;
+
+ ECSchema[] ecSchemas = dfs.getClient().getECSchemas();
+ StringBuilder sb = new StringBuilder();
+ int i = 0;
+ while (i < ecSchemas.length) {
+ ECSchema ecSchema = ecSchemas[i];
+ sb.append(ecSchema.getSchemaName());
+ i++;
+ if (i < ecSchemas.length) {
+ sb.append(", ");
+ }
+ }
+ out.println(sb.toString());
+ }
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
index 89c38e9..046120d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
@@ -867,4 +867,6 @@ service ClientNamenodeProtocol {
returns(GetErasureCodingInfoResponseProto);
rpc getECSchemas(GetECSchemasRequestProto)
returns(GetECSchemasResponseProto);
+ rpc getErasureCodingZoneInfo(GetECZoneInfoRequestProto)
+ returns(GetECZoneInfoResponseProto);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/295ccbca/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/erasurecoding.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/erasurecoding.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/erasurecoding.proto
index 4d5731b..d888f71 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/erasurecoding.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/erasurecoding.proto
@@ -49,6 +49,13 @@ message ECInfoProto {
required ECSchemaProto schema = 2;
}
/**
 * ECZoneInfo: an erasure coding zone, i.e. a directory together with the
 * EC schema used for files stored under it.
 */
message ECZoneInfoProto {
  required string dir = 1;            // root directory of the EC zone
  required ECSchemaProto schema = 2;  // EC schema applied within the zone
}
message CreateErasureCodingZoneRequestProto {
required string src = 1;
@@ -72,3 +79,11 @@ message GetECSchemasRequestProto { // void request
message GetECSchemasResponseProto {
repeated ECSchemaProto schemas = 1;
}
+
/**
 * Request for getErasureCodingZoneInfo.
 */
message GetECZoneInfoRequestProto {
  required string src = 1; // path to get the zone info
}
+
/**
 * Response for getErasureCodingZoneInfo.
 */
message GetECZoneInfoResponseProto {
  // NOTE(review): field name breaks the lowerCamelCase convention used
  // elsewhere in this file, but renaming would change the generated
  // accessors this commit's translators rely on.
  // Presumably absent when the path is not inside an EC zone — TODO confirm
  // against the NameNode implementation.
  optional ECZoneInfoProto ECZoneInfo = 1;
}
\ No newline at end of file