Posted to common-commits@hadoop.apache.org by sh...@apache.org on 2008/10/13 20:53:00 UTC

svn commit: r704202 - in /hadoop/core/branches/branch-0.19: ./ src/core/org/apache/hadoop/fs/ src/test/org/apache/hadoop/cli/ src/test/org/apache/hadoop/cli/util/ src/test/org/apache/hadoop/hdfs/

Author: shv
Date: Mon Oct 13 11:52:59 2008
New Revision: 704202

URL: http://svn.apache.org/viewvc?rev=704202&view=rev
Log:
HADOOP-4014. Merge -r 704194:704195 from trunk to branch 0.19.

Modified:
    hadoop/core/branches/branch-0.19/CHANGES.txt
    hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FileUtil.java
    hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/TestCLI.java
    hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/testConf.xml
    hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/util/CommandExecutor.java
    hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java

Modified: hadoop/core/branches/branch-0.19/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/CHANGES.txt?rev=704202&r1=704201&r2=704202&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.19/CHANGES.txt Mon Oct 13 11:52:59 2008
@@ -833,6 +833,8 @@
     HADOOP-3883. Limit namenode to assign at most one generation stamp for
     a particular block within a short period. (szetszwo)
 
+    HADOOP-4014. Create hard links with 'fsutil hardlink' on Windows. (shv)
+
 Release 0.18.2 - Unreleased
 
   BUG FIXES

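For context on the fix itself: on Windows, 'fsutil hardlink create' takes the
new link name first and the existing file second, the reverse of Unix 'ln'.
That is why the FileUtil.HardLink change below swaps the last two command
slots on the WinXP code path. A minimal sketch of the two argument orders
(the file names are illustrative, not from the patch):

  import java.util.Arrays;

  // Illustrative only: Windows' fsutil takes the NEW link name before the
  // existing file; Unix ln is the other way around.
  public class HardLinkArgOrder {
    public static void main(String[] args) {
      // fsutil hardlink create <NewLinkName> <ExistingFile>
      String[] winCmd  = {"fsutil", "hardlink", "create", "newLink", "existingFile"};
      // ln <ExistingFile> <NewLinkName>
      String[] unixCmd = {"ln", "existingFile", "newLink"};
      System.out.println(Arrays.toString(winCmd));
      System.out.println(Arrays.toString(unixCmd));
    }
  }
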
Modified: hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FileUtil.java?rev=704202&r1=704201&r2=704202&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FileUtil.java Mon Oct 13 11:52:59 2008
@@ -189,7 +189,7 @@
                              Configuration conf) throws IOException {
     dst = checkDest(src.getName(), dstFS, dst, overwrite);
 
-    if (srcFS.isDirectory(src)) {
+    if (srcFS.getFileStatus(src).isDir()) {
       checkDependencies(srcFS, src, dstFS, dst);
       if (!dstFS.mkdirs(dst)) {
         return false;
@@ -230,7 +230,7 @@
                                   Configuration conf, String addString) throws IOException {
     dstFile = checkDest(srcDir.getName(), dstFS, dstFile, false);
 
-    if (!srcFS.isDirectory(srcDir))
+    if (!srcFS.getFileStatus(srcDir).isDir())
       return false;
    
     OutputStream out = dstFS.create(dstFile);
@@ -305,7 +305,7 @@
   public static boolean copy(FileSystem srcFS, Path src, 
                              File dst, boolean deleteSource,
                              Configuration conf) throws IOException {
-    if (srcFS.isDirectory(src)) {
+    if (srcFS.getFileStatus(src).isDir()) {
       if (!dst.mkdirs()) {
         return false;
       }
@@ -392,7 +392,7 @@
    * @throws IOException on windows, there can be problems with the subprocess
    */
   public static String makeShellPath(File file) throws IOException {
-    return makeShellPath(file.toString());
+    return makeShellPath(file.getCanonicalPath());
   }
 
   /**
@@ -427,13 +427,13 @@
    * @throws IOException
    */
   public static void unZip(File inFile, File unzipDir) throws IOException {
-    Enumeration entries;
+    Enumeration<? extends ZipEntry> entries;
     ZipFile zipFile = new ZipFile(inFile);
 
     try {
       entries = zipFile.entries();
       while (entries.hasMoreElements()) {
-        ZipEntry entry = (ZipEntry) entries.nextElement();
+        ZipEntry entry = entries.nextElement();
         if (!entry.isDirectory()) {
           InputStream in = zipFile.getInputStream(entry);
           try {
@@ -523,10 +523,11 @@
   
     private static String[] hardLinkCommand;
     private static String[] getLinkCountCommand;
-    private static String osName = System.getProperty("os.name");
+    private static OSType osType;
     
     static {
-      switch(getOSType()) {
+      osType = getOSType();
+      switch(osType) {
       case OS_TYPE_WINXP:
         hardLinkCommand = new String[] {"fsutil","hardlink","create", null, null};
         getLinkCountCommand = new String[] {"stat","-c%h"};
@@ -547,8 +548,9 @@
     }
 
     static private OSType getOSType() {
+      String osName = System.getProperty("os.name");
       if (osName.indexOf("Windows") >= 0 && 
-          (osName.indexOf("XpP") >= 0 || osName.indexOf("2003") >= 0))
+          (osName.indexOf("XP") >= 0 || osName.indexOf("2003") >= 0 || osName.indexOf("Vista") >= 0))
         return OSType.OS_TYPE_WINXP;
       else if (osName.indexOf("SunOS") >= 0)
          return OSType.OS_TYPE_SOLARIS;
@@ -564,8 +566,13 @@
     public static void createHardLink(File target, 
                                       File linkName) throws IOException {
       int len = hardLinkCommand.length;
-      hardLinkCommand[len-2] = target.getCanonicalPath();
-      hardLinkCommand[len-1] = linkName.getCanonicalPath();
+      if (osType == OSType.OS_TYPE_WINXP) {
+       hardLinkCommand[len-1] = target.getCanonicalPath();
+       hardLinkCommand[len-2] = linkName.getCanonicalPath();
+      } else {
+       hardLinkCommand[len-2] = makeShellPath(target);
+       hardLinkCommand[len-1] = makeShellPath(linkName);
+      }
       // execute shell command
       Process process = Runtime.getRuntime().exec(hardLinkCommand);
       try {

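Besides the hard-link fix, the FileUtil changes above replace
FileSystem.isDirectory(Path) with an explicit getFileStatus(path).isDir()
call, build shell paths from the canonical path, and type the unZip
enumeration as Enumeration<? extends ZipEntry> so the cast can go. A minimal
sketch of the status-based directory check, assuming a configured FileSystem
(class name and argument handling are illustrative); note that isDirectory
typically reported a missing path as simply "not a directory", while
getFileStatus surfaces it as an IOException:

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  // Illustrative only; not part of the patch.
  public class IsDirCheck {
    public static void main(String[] args) throws IOException {
      FileSystem fs = FileSystem.get(new Configuration());
      Path src = new Path(args.length > 0 ? args[0] : "/tmp");
      // Fetch the status once; a missing path fails loudly here instead of
      // being silently treated as a file.
      System.out.println(src + (fs.getFileStatus(src).isDir()
                                ? " is a directory" : " is a file"));
    }
  }
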
Modified: hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/TestCLI.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/TestCLI.java?rev=704202&r1=704201&r2=704202&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/TestCLI.java (original)
+++ hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/TestCLI.java Mon Oct 13 11:52:59 2008
@@ -20,7 +20,6 @@
 
 import java.io.File;
 import java.util.ArrayList;
-import java.util.Properties;
 
 import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
@@ -36,14 +35,9 @@
 
 import org.apache.hadoop.cli.util.CommandExecutor;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.ToolRunner;
 
 /**
  * Tests for the Command Line Interface (CLI)
@@ -60,6 +54,8 @@
  // This can be useful to populate the testConfig.xml file the first time
   // a new command is added
   public static final String TESTMODE_NOCOMPARE = "nocompare";
+  public static final String TEST_CACHE_DATA_DIR =
+    System.getProperty("test.cache.data", "build/test/cache");
   
   //By default, run the tests. The other mode is to run the commands and not
   // compare the output
@@ -86,9 +82,7 @@
     
     if (testsFromConfigFile == null) {
       boolean success = false;
-      Properties props = System.getProperties();
-      testConfigFile = 
-        props.getProperty("test.cache.data") + File.separator + testConfigFile;
+      testConfigFile = TEST_CACHE_DATA_DIR + File.separator + testConfigFile;
       try {
         SAXParser p = (SAXParserFactory.newInstance()).newSAXParser();
         p.parse(testConfigFile, new TestConfigFileParser());
@@ -113,7 +107,7 @@
     conf = new Configuration();
     cluster = new MiniDFSCluster(conf, 1, true, null);
     namenode = conf.get("fs.default.name", "file:///");
-    clitestDataDir = new File(System.getProperty("test.cache.data")).
+    clitestDataDir = new File(TEST_CACHE_DATA_DIR).
       toURI().toString().replace(' ', '+');
     username = System.getProperty("user.name");
 

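The TestCLI change replaces repeated System.getProperty("test.cache.data")
lookups with one shared constant that supplies a default, so the config path
no longer starts with the literal string "null" when the property is unset
(for example, when the test is launched from an IDE rather than Ant). The
defaulting idiom in isolation (the property and fallback are the ones from
the patch; the surrounding class is illustrative):

  // Illustrative wrapper around the idiom adopted in TestCLI.
  public class TestDataDirs {
    // Falls back to build/test/cache when the Ant property is not set.
    public static final String TEST_CACHE_DATA_DIR =
        System.getProperty("test.cache.data", "build/test/cache");

    public static void main(String[] args) {
      System.out.println("test data dir: " + TEST_CACHE_DATA_DIR);
    }
  }
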
Modified: hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/testConf.xml
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/testConf.xml?rev=704202&r1=704201&r2=704202&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/testConf.xml (original)
+++ hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/testConf.xml Mon Oct 13 11:52:59 2008
@@ -1185,7 +1185,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^cp: /file: No such file or directory</expected-output>
+          <expected-output>^cp: File does not exist: /file</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1201,7 +1201,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^cp: file1: No such file or directory</expected-output>
+          <expected-output>^cp: File does not exist: file1</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1785,7 +1785,7 @@
       <comparators>
         <comparator>
           <type>TokenComparator</type>
-          <expected-output>put: /user/wrongdata: No such file or directory</expected-output>
+          <expected-output>put: File /user/wrongdata does not exist</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1801,7 +1801,7 @@
       <comparators>
         <comparator>
           <type>TokenComparator</type>
-          <expected-output>put: wrongdata: No such file or directory</expected-output>
+          <expected-output>put: File wrongdata does not exist</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -2041,7 +2041,7 @@
       <comparators>
         <comparator>
           <type>TokenComparator</type>
-          <expected-output>copyFromLocal: /user/wrongdata: No such file or directory</expected-output>
+          <expected-output>copyFromLocal: File /user/wrongdata does not exist</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -2057,7 +2057,7 @@
       <comparators>
         <comparator>
           <type>TokenComparator</type>
-          <expected-output>copyFromLocal: wrongdata: No such file or directory</expected-output>
+          <expected-output>copyFromLocal: File wrongdata does not exist</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -3108,7 +3108,7 @@
       <comparators>
         <comparator>
           <type>TokenComparator</type>
-          <expected-output>moveFromLocal: /user/wrongdata: No such file or directory</expected-output>
+          <expected-output>moveFromLocal: File /user/wrongdata does not exist</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -3124,7 +3124,7 @@
       <comparators>
         <comparator>
           <type>TokenComparator</type>
-          <expected-output>moveFromLocal: wrongdata: No such file or directory</expected-output>
+          <expected-output>moveFromLocal: File wrongdata does not exist</expected-output>
         </comparator>
       </comparators>
     </test>

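The testConf.xml updates track the error text the shell commands now print:
"File does not exist: ..." (or "File ... does not exist") instead of the
Unix-style "...: No such file or directory". For the RegexpComparator cases
the leading '^' anchors the pattern at the start of the output; a quick
stand-alone check of the new pattern with java.util.regex (the comparator's
internals are assumed, not quoted):

  import java.util.regex.Pattern;

  // Illustrative only: verifies the updated expected-output regex matches
  // the new message shape.
  public class ExpectedOutputCheck {
    public static void main(String[] args) {
      Pattern p = Pattern.compile("^cp: File does not exist: /file");
      System.out.println(p.matcher("cp: File does not exist: /file").find()); // true
    }
  }
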
Modified: hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/util/CommandExecutor.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/util/CommandExecutor.java?rev=704202&r1=704201&r2=704202&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/util/CommandExecutor.java (original)
+++ hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/cli/util/CommandExecutor.java Mon Oct 13 11:52:59 2008
@@ -20,15 +20,12 @@
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
-import java.io.InputStreamReader;
-import java.io.BufferedReader;
 import java.io.PrintStream;
 import java.util.StringTokenizer;
 
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.conf.Configuration;
-
+import org.apache.hadoop.cli.TestCLI;
 
 /**
  *
@@ -51,7 +48,7 @@
 
       args[i] = args[i].replaceAll("NAMENODE", namenode);
       args[i] = args[i].replaceAll("CLITEST_DATA", 
-        new File(System.getProperty("test.cache.data")).
+        new File(TestCLI.TEST_CACHE_DATA_DIR).
         toURI().toString().replace(' ', '+'));
       args[i] = args[i].replaceAll("USERNAME", System.getProperty("user.name"));
 

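CommandExecutor now reuses TestCLI.TEST_CACHE_DATA_DIR for the CLITEST_DATA
substitution instead of reading the property a second time. The expansion
itself, sketched under the same assumptions (the sample command string is
invented for the example):

  import java.io.File;
  import org.apache.hadoop.cli.TestCLI;

  // Illustrative sketch of the CLITEST_DATA token expansion.
  public class TokenExpansion {
    public static void main(String[] args) {
      String arg = "-cat CLITEST_DATA/data15bytes";
      // Build a file: URI from the shared constant; the replace guards
      // against any literal spaces left in the URI string.
      String dataUri = new File(TestCLI.TEST_CACHE_DATA_DIR)
          .toURI().toString().replace(' ', '+');
      System.out.println(arg.replaceAll("CLITEST_DATA", dataUri));
    }
  }
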
Modified: hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java?rev=704202&r1=704201&r2=704202&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java (original)
+++ hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java Mon Oct 13 11:52:59 2008
@@ -31,9 +31,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.*;
 import org.apache.hadoop.hdfs.protocol.Block;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
-import org.apache.hadoop.hdfs.server.common.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.FSDatasetInterface;
@@ -241,7 +239,7 @@
       ioe.initCause(e);
       throw ioe;
     }
-    base_dir = new File(System.getProperty("test.build.data"), "dfs/");
+    base_dir = new File(System.getProperty("test.build.data", "build/test/data"), "dfs/");
     data_dir = new File(base_dir, "data");
     
     // Setup the NameNode configuration
@@ -579,12 +577,13 @@
   boolean corruptBlockOnDataNode(int i, String blockName) throws Exception {
     Random random = new Random();
     boolean corrupted = false;
-    File baseDir = new File(System.getProperty("test.build.data"), "dfs/data");
+    File dataDir = new File(System.getProperty("test.build.data", "build/test/data"), "dfs/data");
     if (i < 0 || i >= dataNodes.size())
       return false;
     for (int dn = i*2; dn < i*2+2; dn++) {
-      File blockFile = new File(baseDir, "data" + (dn+1) + "/current/" +
+      File blockFile = new File(dataDir, "data" + (dn+1) + "/current/" +
                                 blockName);
+      System.out.println("Corrupting for: " + blockFile);
       if (blockFile.exists()) {
         // Corrupt replica by writing random bytes into replica
         RandomAccessFile raFile = new RandomAccessFile(blockFile, "rw");
@@ -717,10 +716,9 @@
     InetSocketAddress addr = new InetSocketAddress("localhost",
                                                    getNameNodePort());
     DFSClient client = new DFSClient(addr, conf);
-    DatanodeInfo[] dnInfos;
 
     // make sure all datanodes are alive
-    while((dnInfos = client.datanodeReport(DatanodeReportType.LIVE)).length
+    while(client.datanodeReport(DatanodeReportType.LIVE).length
         != numDataNodes) {
       try {
         Thread.sleep(500);
@@ -732,7 +730,7 @@
   }
   
   public void formatDataNodeDirs() throws IOException {
-    base_dir = new File(System.getProperty("test.build.data"), "dfs/");
+    base_dir = new File(System.getProperty("test.build.data", "build/test/data"), "dfs/");
     data_dir = new File(base_dir, "data");
     if (data_dir.exists() && !FileUtil.fullyDelete(data_dir)) {
       throw new IOException("Cannot remove data directory: " + data_dir);
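
The MiniDFSCluster changes apply the same defaulting idiom to
test.build.data, so the cluster's storage root resolves even outside the Ant
harness, and drop a write-only DatanodeInfo[] local in the datanode-report
loop. The path resolution in isolation (class name is illustrative):

  import java.io.File;

  // Illustrative only: how MiniDFSCluster now resolves its storage root.
  public class TestBuildData {
    public static void main(String[] args) {
      // Defaults to build/test/data when the Ant property is not set.
      File baseDir = new File(System.getProperty("test.build.data",
                                                 "build/test/data"), "dfs/");
      File dataDir = new File(baseDir, "data");
      System.out.println("base: " + baseDir + ", data: " + dataDir);
    }
  }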