You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/10/18 21:57:24 UTC
svn commit: r465354 - in /lucene/hadoop/trunk: ./
src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/
src/java/org/apache/hadoop/fs/ src/test/org/apache/hadoop/dfs/
Author: cutting
Date: Wed Oct 18 12:57:23 2006
New Revision: 465354
URL: http://svn.apache.org/viewvc?view=rev&rev=465354
Log:
HADOOP-462. Improve command line parsing in DFSShell, so that incorrect numbers of arguments result in informative errors rather than ArrayIndexOutOfBoundsException. Contributed by Dhruba.
Modified:
lucene/hadoop/trunk/CHANGES.txt
lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSMkdirs.java
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=465354&r1=465353&r2=465354
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Oct 18 12:57:23 2006
@@ -23,6 +23,10 @@
6. HADOOP-588. Fix logging and accounting of failed tasks.
(Sanjay Dahiya via cutting)
+ 7. HADOOP-462. Improve command line parsing in DFSShell, so that
+ incorrect numbers of arguments result in informative errors rather
+ than ArrayIndexOutOfBoundsException. (Dhruba Borthakur via cutting)
+
Release 0.7.1 - 2006-10-11
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java?view=diff&rev=465354&r1=465353&r2=465354
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java Wed Oct 18 12:57:23 2006
@@ -71,6 +71,7 @@
private static final Log LOG =
LogFactory.getLog("org.apache.hadoop.conf.Configuration");
+ private boolean quietmode = false;
private ArrayList defaultResources = new ArrayList();
private ArrayList finalResources = new ArrayList();
@@ -454,7 +455,7 @@
private synchronized Properties getProps() {
if (properties == null) {
Properties newProps = new Properties();
- loadResources(newProps, defaultResources, false, false);
+ loadResources(newProps, defaultResources, false, quietmode);
loadResources(newProps, finalResources, true, true);
properties = newProps;
if(overlay!=null)
@@ -487,20 +488,26 @@
if (name instanceof URL) { // an URL resource
URL url = (URL)name;
if (url != null) {
- LOG.info("parsing " + url);
+ if (!quiet) {
+ LOG.info("parsing " + url);
+ }
doc = builder.parse(url.toString());
}
} else if (name instanceof String) { // a CLASSPATH resource
URL url = getResource((String)name);
if (url != null) {
- LOG.info("parsing " + url);
+ if (!quiet) {
+ LOG.info("parsing " + url);
+ }
doc = builder.parse(url.toString());
}
} else if (name instanceof Path) { // a file resource
Path file = (Path)name;
FileSystem fs = FileSystem.getNamed("local", this);
if (fs.exists(file)) {
- LOG.info("parsing " + file);
+ if (!quiet) {
+ LOG.info("parsing " + file);
+ }
InputStream in = new BufferedInputStream(fs.openRaw(file));
try {
doc = builder.parse(in);
@@ -627,6 +634,13 @@
}
sb.append(i.next());
}
+ }
+
+ /** Make this class quiet. Error and informational
+ * messages might not be logged.
+ */
+ public void setQuietMode(boolean value) {
+ quietmode = value;
}
/** For debugging. List non-default properties to the terminal and exit. */
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java?view=diff&rev=465354&r1=465353&r2=465354
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java Wed Oct 18 12:57:23 2006
@@ -16,9 +16,11 @@
package org.apache.hadoop.dfs;
import java.io.*;
+import java.text.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.*;
+import org.apache.hadoop.ipc.*;
import org.apache.hadoop.util.ToolBase;
/**************************************************
@@ -36,6 +38,7 @@
}
public void init() throws IOException {
+ conf.setQuietMode(true);
this.fs = FileSystem.get(conf);
}
/**
@@ -455,7 +458,7 @@
return;
}
if( idx != argv.length-1 ) {
- System.out.println( safeModeUsage );
+ printUsage("-safemode");
return;
}
FSConstants.SafeModeAction action;
@@ -466,7 +469,7 @@
else if( "get".equalsIgnoreCase(argv[idx]) )
action = FSConstants.SafeModeAction.SAFEMODE_GET;
else {
- System.out.println( safeModeUsage );
+ printUsage("-safemode");
return;
}
DistributedFileSystem dfs = (DistributedFileSystem)fs;
@@ -475,42 +478,125 @@
}
/**
+ * Displays format of commands.
+ *
+ */
+ public void printUsage(String cmd) {
+ if ("-fs".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [-fs <local | namenode:port>]");
+ } else if ("-conf".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [-conf <configuration file>]");
+ } else if ("-D".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [-D <[property=value>]");
+ } else if ("-ls".equals(cmd) || "-lsr".equals(cmd) ||
+ "-du".equals(cmd) || "-rm".equals(cmd) ||
+ "-rmr".equals(cmd) || "-mkdir".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [" + cmd + " <path>]");
+ } else if ("-mv".equals(cmd) || "-cp".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [" + cmd + " <src> <dst>]");
+ } else if ("-put".equals(cmd) || "-copyFromLocal".equals(cmd) ||
+ "-moveFromLocal".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [" + cmd + " <localsrc> <dst>]");
+ } else if ("-get".equals(cmd) || "-copyToLocal".equals(cmd) ||
+ "-moveToLocal".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [" + cmd + " <src> <localdst>]");
+ } else if ("-cat".equals(cmd)) {
+ System.out.println("Usage: java DFSShell" +
+ " [" + cmd + " <src>]");
+ } else if ("-get".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [" + cmd + " <src> <localdst> [addnl]]");
+ } else if ("-report".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [report]");
+ } else if ("-setrep".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [-setrep [-R] <rep> <path/file>]");
+ } else if ("-safemode".equals(cmd)) {
+ System.err.println("Usage: java DFSShell" +
+ " [-safemode enter | leave | get]");
+ } else {
+ System.err.println("Usage: java DFSShell");
+ System.err.println(" [-fs <local | namenode:port>]");
+ System.err.println(" [-conf <configuration file>]");
+ System.err.println(" [-D <[property=value>]");
+ System.err.println(" [-ls <path>]" );
+ System.err.println(" [-lsr <path>]");
+ System.err.println(" [-du <path>]");
+ System.err.println(" [-mv <src> <dst>]");
+ System.err.println(" [-cp <src> <dst>]");
+ System.err.println(" [-rm <path>]");
+ System.err.println(" [-rmr <path>]");
+ System.err.println(" [-put <localsrc> <dst>]");
+ System.err.println(" [-copyFromLocal <localsrc> <dst>]");
+ System.err.println(" [-moveFromLocal <localsrc> <dst>]");
+ System.err.println(" [-get <src> <localdst>]");
+ System.err.println(" [-getmerge <src> <localdst> [addnl]]");
+ System.err.println(" [-cat <src>]");
+ System.err.println(" [-copyToLocal <src> <localdst>]");
+ System.err.println(" [-moveToLocal <src> <localdst>]");
+ System.err.println(" [-mkdir <path>]");
+ System.err.println(" [-report]");
+ System.err.println(" [-setrep [-R] <rep> <path/file>]");
+ System.err.println(" [-safemode enter | leave | get]");
+ }
+ }
+
+ /**
* run
*/
public int run( String argv[] ) throws Exception {
+
if (argv.length < 1) {
- System.out.println("Usage: java DFSShell" +
- " [-fs <local | namenode:port>]" +
- " [-conf <configuration file>]" +
- " [-D <[property=value>]"+
- " [-ls <path>]"+
- " [-lsr <path>]"+
- " [-du <path>]"+
- " [-mv <src> <dst>]"+
- " [-cp <src> <dst>]"+
- " [-rm <path>]" +
- " [-rmr <path>]" +
- " [-put <localsrc> <dst>]"+
- " [-copyFromLocal <localsrc> <dst>]"+
- " [-moveFromLocal <localsrc> <dst>]" +
- " [-get <src> <localdst>]"+
- " [-getmerge <src> <localdst> [addnl]]"+
- " [-cat <src>]"+
- " [-copyToLocal <src> <localdst>]" +
- " [-moveToLocal <src> <localdst>]"+
- " [-mkdir <path>]"+
- " [-report]"+
- " [-setrep [-R] <rep> <path/file>]" +
- " [-safemode enter | leave | get]");
+ printUsage("");
return -1;
}
- // initialize DFSShell
- init();
-
int exitCode = -1;
int i = 0;
String cmd = argv[i++];
+
+ //
+ // verify that we have enough command line parameters
+ //
+ if ("-put".equals(cmd) || "-get".equals(cmd) ||
+ "-copyFromLocal".equals(cmd) || "-moveFromLocal".equals(cmd) ||
+ "-copyToLocal".equals(cmd) || "-moveToLocal".equals(cmd) ||
+ "-mv".equals(cmd) || "-cp".equals(cmd)) {
+ if (argv.length != 3) {
+ printUsage(cmd);
+ return exitCode;
+ }
+ } else if ("-rm".equals(cmd) || "-rmr".equals(cmd) ||
+ "-cat".equals(cmd) || "-mkdir".equals(cmd) ||
+ "-safemode".equals(cmd)) {
+ if (argv.length != 2) {
+ printUsage(cmd);
+ return exitCode;
+ }
+ } else if ( "-report".equals(cmd)) {
+ if (argv.length != 1) {
+ printUsage(cmd);
+ return exitCode;
+ }
+ }
+
+ // initialize DFSShell
+ try {
+ init();
+ } catch (IOException e) {
+ System.err.println("Bad connection to DFS... command aborted.");
+ return exitCode;
+ }
+
+ exitCode = 0;
try {
if ("-put".equals(cmd) || "-copyFromLocal".equals(cmd)) {
copyFromLocal(new Path(argv[i++]), argv[i++]);
@@ -523,7 +609,6 @@
copyMergeToLocal(argv[i++], new Path(argv[i++]), Boolean.parseBoolean(argv[i++]));
else
copyMergeToLocal(argv[i++], new Path(argv[i++]));
-
} else if ("-cat".equals(cmd)) {
cat(argv[i++]);
} else if ("-moveToLocal".equals(cmd)) {
@@ -553,10 +638,32 @@
report();
} else if ("-safemode".equals(cmd)) {
setSafeMode(argv,i);
+ } else {
+ exitCode = -1;
+ System.err.println(cmd.substring(1) + ": Unknown command");
+ printUsage("");
}
- exitCode = 0;;
+ } catch (RemoteException e) {
+ //
+ // This is an error returned by the hadoop server. Print
+ // out the first line of the error message, ignore the stack trace.
+ exitCode = -1;
+ try {
+ String[] content;
+ content = e.getLocalizedMessage().split("\n");
+ System.err.println(cmd.substring(1) + ": " +
+ content[0]);
+ } catch (Exception ex) {
+ System.err.println(cmd.substring(1) + ": " +
+ ex.getLocalizedMessage());
+ }
} catch (IOException e ) {
- System.err.println( cmd.substring(1) + ": " + e.getLocalizedMessage() );
+ //
+ // IO exception encountered locally.
+ //
+ exitCode = -1;
+ System.err.println(cmd.substring(1) + ": " +
+ e.getLocalizedMessage());
} finally {
fs.close();
}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java?view=diff&rev=465354&r1=465353&r2=465354
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java Wed Oct 18 12:57:23 2006
@@ -190,8 +190,9 @@
}
// check whether the parent already has a node with that name
String name = newNode.name = target.getName();
- if( parentNode.getChild( name ) != null )
+ if( parentNode.getChild( name ) != null ) {
return null;
+ }
// insert into the parent children list
parentNode.children.put(name, newNode);
newNode.parent = parentNode;
@@ -700,7 +701,13 @@
NameNode.stateChangeLog.debug("DIR* FSDirectory.mkdirs: "
+"created directory "+cur );
fsImage.getEditLog().logMkDir( inserted );
- } // otherwise cur exists, continue
+ } else { // otherwise cur exists, verify that it is a directory
+ if (!isDir(new UTF8(cur))) {
+ NameNode.stateChangeLog.debug("DIR* FSDirectory.mkdirs: "
+ +"path " + cur + " is not a directory ");
+ return false;
+ }
+ }
} catch (FileNotFoundException e ) {
NameNode.stateChangeLog.debug("DIR* FSDirectory.mkdirs: "
+"failed to create directory "+src);
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java?view=diff&rev=465354&r1=465353&r2=465354
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java Wed Oct 18 12:57:23 2006
@@ -833,13 +833,18 @@
* Create all the necessary directories
*/
public boolean mkdirs( String src ) throws IOException {
+ boolean success;
NameNode.stateChangeLog.debug("DIR* NameSystem.mkdirs: " + src );
if( isInSafeMode() )
throw new SafeModeException( "Cannot create directory " + src, safeMode );
if (!isValidName(src)) {
throw new IOException("Invalid directory name: " + src);
}
- return dir.mkdirs(src);
+ success = dir.mkdirs(src);
+ if (!success) {
+ throw new IOException("Invalid directory name: " + src);
+ }
+ return success;
}
/**
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java?view=diff&rev=465354&r1=465353&r2=465354
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java Wed Oct 18 12:57:23 2006
@@ -78,6 +78,8 @@
} finally {
in.close();
}
+ } else {
+ throw new IOException(src.toString() + ": No such file or directory");
}
if (deleteSource) {
return srcFS.delete(src);
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSMkdirs.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSMkdirs.java?view=diff&rev=465354&r1=465353&r2=465354
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSMkdirs.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSMkdirs.java Wed Oct 18 12:57:23 2006
@@ -41,7 +41,13 @@
// Third, use mkdir to create a subdirectory off of that file,
// and check that it fails.
Path myIllegalPath = new Path("/test/mkdirs/myFile/subdir");
- assertFalse(fileSys.mkdirs(myIllegalPath));
+ Boolean exist = true;
+ try {
+ fileSys.mkdirs(myIllegalPath);
+ } catch (IOException e) {
+ exist = false;
+ }
+ assertFalse(exist);
assertFalse(fileSys.exists(myIllegalPath));
fileSys.delete(myFile);