Posted to common-commits@hadoop.apache.org by cd...@apache.org on 2008/08/15 01:06:58 UTC
svn commit: r686076 - in /hadoop/core/trunk: ./
src/core/org/apache/hadoop/ipc/ src/hdfs/org/apache/hadoop/hdfs/
src/hdfs/org/apache/hadoop/hdfs/server/namenode/
src/test/org/apache/hadoop/fs/ src/tools/org/apache/hadoop/tools/
Author: cdouglas
Date: Thu Aug 14 16:06:57 2008
New Revision: 686076
URL: http://svn.apache.org/viewvc?rev=686076&view=rev
Log:
HADOOP-3889. Improve error reporting from HftpFileSystem, handling in
DistCp. Contributed by Tsz Wo (Nicholas), SZE.
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/core/org/apache/hadoop/ipc/RemoteException.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java
hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=686076&r1=686075&r2=686076&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Aug 14 16:06:57 2008
@@ -314,6 +314,9 @@
HADOOP-3951. Fix package name for FSNamesystem logs and modify other
hard-coded Logs to use the class name. (cdouglas)
+ HADOOP-3889. Improve error reporting from HftpFileSystem, handling in
+ DistCp. (Tsz Wo (Nicholas), SZE via cdouglas)
+
Release 0.18.0 - 2008-08-19
INCOMPATIBLE CHANGES
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/ipc/RemoteException.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/ipc/RemoteException.java?rev=686076&r1=686075&r2=686076&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/ipc/RemoteException.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/ipc/RemoteException.java Thu Aug 14 16:06:57 2008
@@ -21,7 +21,13 @@
import java.io.IOException;
import java.lang.reflect.Constructor;
+import org.xml.sax.Attributes;
+import org.znerd.xmlenc.XMLOutputter;
+
public class RemoteException extends IOException {
+ /** For java.io.Serializable */
+ private static final long serialVersionUID = 1L;
+
private String className;
public RemoteException(String className, String msg) {
@@ -91,4 +97,24 @@
ex.initCause(this);
return ex;
}
+
+ /** Write the object to XML format */
+ public void writeXml(String path, XMLOutputter doc) throws IOException {
+ doc.startTag(RemoteException.class.getSimpleName());
+ doc.attribute("path", path);
+ doc.attribute("class", getClassName());
+ String msg = getLocalizedMessage();
+ int i = msg.indexOf("\n");
+ if (i >= 0) {
+ msg = msg.substring(0, i);
+ }
+ doc.attribute("message", msg.substring(msg.indexOf(":") + 1).trim());
+ doc.endTag();
+ }
+
+ /** Create RemoteException from attributes */
+ public static RemoteException valueOf(Attributes attrs) {
+ return new RemoteException(attrs.getValue("class"),
+ attrs.getValue("message"));
+ }
}
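
[For illustration only, not part of the patch: a minimal driver for the new
XML round trip. The path, class name, and message below are made up; the
XMLOutputter(Writer, String) constructor and declaration()/endDocument()
calls follow the same xmlenc usage found in the DFS servlets.]

    import java.io.StringWriter;
    import org.apache.hadoop.ipc.RemoteException;
    import org.znerd.xmlenc.XMLOutputter;

    public class RemoteExceptionXmlDemo {
      public static void main(String[] args) throws Exception {
        // A server-side exception as the namenode would wrap it.
        RemoteException re = new RemoteException(
            "org.apache.hadoop.fs.permission.AccessControlException",
            "org.apache.hadoop.fs.permission.AccessControlException: Permission denied");
        StringWriter out = new StringWriter();
        XMLOutputter doc = new XMLOutputter(out, "UTF-8");
        doc.declaration();
        re.writeXml("/user/foo", doc);  // emits one <RemoteException .../> element
        doc.endDocument();
        // Expected element shape:
        // <RemoteException path="/user/foo"
        //     class="org.apache.hadoop.fs.permission.AccessControlException"
        //     message="Permission denied"/>
        System.out.println(out);
      }
    }

On the client side, valueOf(Attributes) reverses this inside a SAX handler,
rebuilding a RemoteException from the "class" and "message" attributes.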
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=686076&r1=686075&r2=686076&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Thu Aug 14 16:06:57 2008
@@ -51,6 +51,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
+import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.*;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.StringUtils;
@@ -103,6 +104,9 @@
try {
final URL url = new URI("http", null, nnAddr.getHostName(),
nnAddr.getPort(), path, query, null).toURL();
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("url=" + url);
+ }
return (HttpURLConnection)url.openConnection();
} catch (URISyntaxException e) {
throw (IOException)new IOException().initCause(e);
@@ -149,6 +153,9 @@
Attributes attrs) throws SAXException {
if ("listing".equals(qname)) return;
if (!"file".equals(qname) && !"directory".equals(qname)) {
+ if (RemoteException.class.getSimpleName().equals(qname)) {
+ throw new SAXException(RemoteException.valueOf(attrs));
+ }
throw new SAXException("Unrecognized entry: " + qname);
}
long modif;
@@ -183,10 +190,12 @@
InputStream resp = connection.getInputStream();
xr.parse(new InputSource(resp));
- } catch (SAXException e) {
- IOException ie = new IOException("invalid xml directory content");
- ie.initCause(e);
- throw ie;
+ } catch(SAXException e) {
+ final Exception embedded = e.getException();
+ if (embedded != null && embedded instanceof IOException) {
+ throw (IOException)embedded;
+ }
+ throw new IOException("invalid xml directory content", e);
}
}
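
[The pattern above is worth spelling out: SAX ContentHandler callbacks may
only throw SAXException, so the patch tunnels the reconstructed
RemoteException through a SAXException and unwraps it at the parse() call
site. A stripped-down sketch of the same pattern follows; the class and
message strings are hypothetical.]

    import java.io.IOException;
    import org.xml.sax.SAXException;

    class SaxExceptionTunnelDemo {
      // Inside a handler callback: a checked IOException cannot escape
      // directly, so wrap the real cause in a SAXException.
      static void startElement() throws SAXException {
        throw new SAXException(new IOException("Permission denied"));
      }

      // At the parse() call site: recover the embedded IOException if present.
      static void fetchList() throws IOException {
        try {
          startElement();
        } catch (SAXException e) {
          Exception embedded = e.getException();
          if (embedded instanceof IOException) {
            throw (IOException) embedded;  // surface the original cause
          }
          throw new IOException("invalid xml directory content", e);
        }
      }
    }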
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java?rev=686076&r1=686075&r2=686076&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java Thu Aug 14 16:06:57 2008
@@ -20,21 +20,24 @@
import java.io.IOException;
import javax.servlet.ServletContext;
-import javax.servlet.http.*;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.security.*;
-import org.znerd.xmlenc.XMLOutputter;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
/**
* A base class for the servlets in DFS.
*/
abstract class DfsServlet extends HttpServlet {
+ /** For java.io.Serializable */
+ private static final long serialVersionUID = 1L;
+
static final Log LOG = LogFactory.getLog(DfsServlet.class.getCanonicalName());
/** Get {@link UserGroupInformation} from request */
@@ -62,18 +65,4 @@
UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
return DFSClient.createNamenode(nn.getNameNodeAddress(), conf);
}
-
- static void writeRemoteException(String path, RemoteException re,
- XMLOutputter doc) throws IOException {
- doc.startTag("RemoteException");
- doc.attribute("path", path);
- doc.attribute("class", re.getClassName());
- String msg = re.getLocalizedMessage();
- int i = msg.indexOf("\n");
- if (i >= 0) {
- msg = msg.substring(0, i);
- }
- doc.attribute("message", msg.substring(msg.indexOf(":") + 1).trim());
- doc.endTag();
- }
}
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java?rev=686076&r1=686075&r2=686076&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java Thu Aug 14 16:06:57 2008
@@ -44,6 +44,9 @@
* @see org.apache.hadoop.hdfs.HftpFileSystem
*/
public class ListPathsServlet extends DfsServlet {
+ /** For java.io.Serializable */
+ private static final long serialVersionUID = 1L;
+
public static final SimpleDateFormat df =
new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
static {
@@ -156,7 +159,7 @@
writeInfo(i, doc);
}
}
- catch(RemoteException re) {writeRemoteException(p, re, doc);}
+ catch(RemoteException re) {re.writeXml(p, doc);}
}
} catch (PatternSyntaxException e) {
out.println(e.toString());
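
[With the helper moved onto RemoteException, the servlet's error path is a
one-liner. For reference, the listing document the HftpFileSystem handler
parses looks roughly like this when one sub-path fails; paths and values are
illustrative, and the attribute lists are abbreviated.]

    <?xml version="1.0" encoding="UTF-8"?>
    <listing path="/user/foo" recursive="no" time="2008-08-14T16:06:57+0000">
      <directory path="/user/foo/ok" modified="2008-08-14T16:06:57+0000"/>
      <RemoteException path="/user/foo/bad"
          class="org.apache.hadoop.fs.permission.AccessControlException"
          message="Permission denied"/>
    </listing>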
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java?rev=686076&r1=686075&r2=686076&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java Thu Aug 14 16:06:57 2008
@@ -38,6 +38,8 @@
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.tools.DistCp;
import org.apache.hadoop.util.ToolRunner;
@@ -52,6 +54,7 @@
((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.OFF);
((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.OFF);
((Log4JLogger)DistCp.LOG).getLogger().setLevel(Level.ALL);
+ ((Log4JLogger)FileSystem.LOG).getLogger().setLevel(Level.ALL);
}
static final URI LOCAL_FS = URI.create("file:///");
@@ -108,14 +111,17 @@
long getSeed() { return seed; }
}
+ private static MyFile[] createFiles(URI fsname, String topdir)
+ throws IOException {
+ return createFiles(FileSystem.get(fsname, new Configuration()), topdir);
+ }
+
/** create NFILES with random names and directory hierarchies
* with random (but reproducible) data in them.
*/
- private static MyFile[] createFiles(URI fsname, String topdir)
+ private static MyFile[] createFiles(FileSystem fs, String topdir)
throws IOException {
- FileSystem fs = FileSystem.get(fsname, new Configuration());
Path root = new Path(topdir);
-
MyFile[] files = new MyFile[NFILES];
for (int i = 0; i < NFILES; i++) {
files[i] = createFile(root, fs);
@@ -149,9 +155,13 @@
/** check if the files have been copied correctly. */
private static boolean checkFiles(String fsname, String topdir, MyFile[] files,
boolean existingOnly) throws IOException {
-
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getNamed(fsname, conf);
+ return checkFiles(fs, topdir, files, existingOnly);
+ }
+
+ private static boolean checkFiles(FileSystem fs, String topdir, MyFile[] files,
+ boolean existingOnly) throws IOException {
Path root = new Path(topdir);
for (int idx = 0; idx < files.length; idx++) {
@@ -689,4 +699,64 @@
if (cluster != null) { cluster.shutdown(); }
}
}
+
+ static final long now = System.currentTimeMillis();
+
+ static UnixUserGroupInformation createUGI(String name, boolean issuper) {
+ String username = name + now;
+ String group = issuper? "supergroup": username;
+ return UnixUserGroupInformation.createImmutable(
+ new String[]{username, group});
+ }
+
+ static Path createHomeDirectory(FileSystem fs, UserGroupInformation ugi
+ ) throws IOException {
+ final Path home = new Path("/user/" + ugi.getUserName());
+ fs.mkdirs(home);
+ fs.setOwner(home, ugi.getUserName(), ugi.getGroupNames()[0]);
+ fs.setPermission(home, new FsPermission((short)0700));
+ return home;
+ }
+
+ public void testHftpAccessControl() throws Exception {
+ MiniDFSCluster cluster = null;
+ try {
+ final UnixUserGroupInformation DFS_UGI = createUGI("dfs", true);
+ final UnixUserGroupInformation USER_UGI = createUGI("user", false);
+
+ //start cluster by DFS_UGI
+ final Configuration dfsConf = new Configuration();
+ UnixUserGroupInformation.saveToConf(dfsConf,
+ UnixUserGroupInformation.UGI_PROPERTY_NAME, DFS_UGI);
+ cluster = new MiniDFSCluster(dfsConf, 2, true, null);
+ cluster.waitActive();
+
+ final String httpAdd = dfsConf.get("dfs.http.address");
+ final URI nnURI = FileSystem.getDefaultUri(dfsConf);
+ final String nnUri = nnURI.toString();
+ final Path home = createHomeDirectory(FileSystem.get(nnURI, dfsConf), USER_UGI);
+
+ //now, login as USER_UGI
+ final Configuration userConf = new Configuration();
+ UnixUserGroupInformation.saveToConf(userConf,
+ UnixUserGroupInformation.UGI_PROPERTY_NAME, USER_UGI);
+ final FileSystem fs = FileSystem.get(nnURI, userConf);
+
+ final Path srcrootpath = new Path(home, "src_root");
+ final String srcrootdir = srcrootpath.toString();
+ final Path dstrootpath = new Path(home, "dst_root");
+ final String dstrootdir = dstrootpath.toString();
+ final DistCp distcp = new DistCp(userConf);
+
+ FileSystem.mkdirs(fs, srcrootpath, new FsPermission((short)0700));
+ final String[] args = {"hftp://"+httpAdd+srcrootdir, nnUri+dstrootdir};
+
+ { //copy with permission 000, should fail
+ fs.setPermission(srcrootpath, new FsPermission((short)0));
+ assertEquals(-3, ToolRunner.run(distcp, args));
+ }
+ } finally {
+ if (cluster != null) { cluster.shutdown(); }
+ }
+ }
}
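
[The new test pins down the user-visible contract: a DistCp over hftp
against an unreadable source now fails with return code -3 rather than an
opaque stack trace. A standalone sketch of the same check; the cluster
addresses and paths below are placeholders, not from the patch.]

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.DistCp;
    import org.apache.hadoop.util.ToolRunner;

    public class DistCpExitCodeDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] distcpArgs = {
            "hftp://namenode:50070/user/foo/src_root",  // source with mode 000
            "hdfs://namenode:8020/user/foo/dst_root"
        };
        // run() now returns -3 when the copy fails on an AccessControlException.
        int rc = ToolRunner.run(new DistCp(conf), distcpArgs);
        System.out.println("exit code: " + rc);
      }
    }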
Modified: hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java?rev=686076&r1=686075&r2=686076&view=diff
==============================================================================
--- hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java Thu Aug 14 16:06:57 2008
@@ -39,14 +39,16 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AccessControlException;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
@@ -820,7 +822,7 @@
* input files. The mapper actually copies the files allotted to it. The
* reduce is empty.
*/
- public int run(String[] args) throws Exception {
+ public int run(String[] args) {
try {
copy(conf, Arguments.valueOf(args, conf));
return 0;
@@ -831,6 +833,13 @@
} catch (DuplicationException e) {
System.err.println(StringUtils.stringifyException(e));
return DuplicationException.ERROR_CODE;
+ } catch (RemoteException e) {
+ final IOException unwrapped = e.unwrapRemoteException(
+ FileNotFoundException.class,
+ AccessControlException.class,
+ QuotaExceededException.class);
+ System.err.println(StringUtils.stringifyException(unwrapped));
+ return -3;
} catch (Exception e) {
System.err.println("With failures, global counters are inaccurate; " +
"consider running with -i");