You are viewing a plain text version of this content. The canonical link to the original message is available from the Apache mailing list archive.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/08/02 20:11:32 UTC

svn commit: r428076 - in /lucene/hadoop/trunk: CHANGES.txt src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java

Author: cutting
Date: Wed Aug  2 11:11:31 2006
New Revision: 428076

URL: http://svn.apache.org/viewvc?rev=428076&view=rev
Log:
HADOOP-411.  Add unit tests for command line parser.  Contributed by Hairong.

Added:
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=428076&r1=428075&r2=428076&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Aug  2 11:11:31 2006
@@ -131,6 +131,9 @@
 37. HADOOP-410.  Replace some TreeMaps with HashMaps in DFS, for
     a 17% performance improvement. (Milind Bhandarkar via cutting)
 
+38. HADOOP-411.  Add unit tests for command line parser.
+    (Hairong Kuang via cutting)
+
 
 Release 0.4.0 - 2006-06-28
 

Added: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java?rev=428076&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java (added)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java Wed Aug  2 11:11:31 2006
@@ -0,0 +1,105 @@
+package org.apache.hadoop.dfs;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.dfs.DFSShell;
+import org.apache.hadoop.dfs.DataNode;
+import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+public class TestDFSShellGenericOptions extends TestCase {
+
+    public void testDFSCommand() throws IOException {
+        String namenode = null;
+        MiniDFSCluster cluster = null;
+        try {
+          Configuration conf = new Configuration();
+          cluster = new MiniDFSCluster(65316, conf, true);
+          namenode = conf.get("fs.default.name", "local");
+          String [] args = new String[4];
+          args[2] = "-mkdir";
+          args[3] = "/data";
+          testFsOption(args, namenode);
+          testConfOption(args, namenode);
+          testPropertyOption(args, namenode);
+        } finally {
+          if (cluster != null) { cluster.shutdown(); }
+        }
+      }
+
+    private void testFsOption(String [] args, String namenode) {        
+        // prepare arguments to create a directory /data
+        args[0] = "-fs";
+        args[1] = namenode;
+        execute(args, namenode);
+    }
+    
+    private void testConfOption(String[] args, String namenode) {
+        // prepare configuration hadoop-site.xml
+        File configDir = new File("conf", "minidfs");
+        configDir.mkdirs();
+        File siteFile = new File(configDir, "hadoop-site.xml");
+        PrintWriter pw;
+        try {
+            pw = new PrintWriter(siteFile);
+            pw.print("<?xml version=\"1.0\"?>\n"+
+                    "<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>\n"+
+                    "<configuration>\n"+
+                    " <property>\n"+
+                    "   <name>fs.default.name</name>\n"+
+                    "   <value>"+namenode+"</value>\n"+
+                    " </property>\n"+
+                    "</configuration>\n");
+            pw.close();
+    
+            // prepare arguments to create a directory /data
+            args[0] = "-conf";
+            args[1] = "conf/minidfs/hadoop-site.xml";
+            execute(args, namenode); 
+        } catch (FileNotFoundException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        }
+        
+        configDir.delete();
+    }
+    
+    private void testPropertyOption(String[] args, String namenode) {
+        // prepare arguments to create a directory /data
+        args[0] = "-D";
+        args[1] = "fs.default.name="+namenode;
+        execute(args, namenode);        
+    }
+    
+    private void execute( String [] args, String namenode ) {
+        DFSShell shell=new DFSShell();
+        FileSystem fs=null;
+        try {
+            shell.doMain(new Configuration(), args);
+            fs = new DistributedFileSystem(
+                    DataNode.createSocketAddr(namenode), 
+                    shell.getConf());
+            assertTrue( "Directory does not get created", 
+                    fs.isDirectory(new Path("/data")) );
+            fs.delete(new Path("/data"));
+        } catch (Exception e) {
+            System.err.println(e.getMessage());
+            e.printStackTrace();
+        } finally {
+            if( fs!=null ) {
+                try {
+                  fs.close();
+                } catch (IOException ignored) {
+                }
+            }
+        }
+    }
+
+}