Posted to common-commits@hadoop.apache.org by to...@apache.org on 2012/10/11 08:14:38 UTC

svn commit: r1396918 - in /hadoop/common/branches/HDFS-3077: ./ dev-support/ hadoop-project/ hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/

Author: todd
Date: Thu Oct 11 06:14:26 2012
New Revision: 1396918

URL: http://svn.apache.org/viewvc?rev=1396918&view=rev
Log:
Merge trunk into branch

Removed:
    hadoop/common/branches/HDFS-3077/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCp.java
Modified:
    hadoop/common/branches/HDFS-3077/   (props changed)
    hadoop/common/branches/HDFS-3077/dev-support/test-patch.sh
    hadoop/common/branches/HDFS-3077/hadoop-project/pom.xml
    hadoop/common/branches/HDFS-3077/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java

Propchange: hadoop/common/branches/HDFS-3077/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1390199-1396916

Modified: hadoop/common/branches/HDFS-3077/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/dev-support/test-patch.sh?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-3077/dev-support/test-patch.sh Thu Oct 11 06:14:26 2012
@@ -335,7 +335,7 @@ checkTests () {
         echo "The patch appears to be a documentation patch that doesn't require tests."
         JIRA_COMMENT="$JIRA_COMMENT
 
-    +0 tests included.  The patch appears to be a documentation patch that doesn't require tests."
+    {color:green}+0 tests included{color}.  The patch appears to be a documentation patch that doesn't require tests."
         return 0
       fi
     fi
@@ -681,12 +681,46 @@ runTests () {
 
   failed_tests=""
   modules=$(findModules)
-  for module in $modules;
-  do
+  #
+  # If we are building hadoop-hdfs-project, we must build the native component
+  # of hadoop-common-project first.  In order to accomplish this, we move the
+  # hadoop-hdfs subprojects to the end of the list so that common will come
+  # first.
+  #
+  # Of course, we may not be building hadoop-common at all-- in this case, we
+  # explicitly insert a mvn compile -Pnative of common, to ensure that the
+  # native libraries show up where we need them.
+  #
+  building_common=0
+  for module in $modules; do
+      if [[ $module == hadoop-hdfs-project* ]]; then
+          hdfs_modules="$hdfs_modules $module"
+      elif [[ $module == hadoop-common-project* ]]; then
+          ordered_modules="$ordered_modules $module"
+          building_common=1
+      else
+          ordered_modules="$ordered_modules $module"
+      fi
+  done
+  if [ -n $hdfs_modules ]; then
+      ordered_modules="$ordered_modules $hdfs_modules"
+      if [[ $building_common -eq 0 ]]; then
+          echo "  Building hadoop-common with -Pnative in order to provide \
+libhadoop.so to the hadoop-hdfs unit tests."
+          echo "  $MVN compile -Pnative -D${PROJECT_NAME}PatchProcess"
+          if ! $MVN compile -Pnative -D${PROJECT_NAME}PatchProcess; then
+              JIRA_COMMENT="$JIRA_COMMENT
+        {color:red}-1 core tests{color}.  Failed to build the native portion \
+of hadoop-common prior to running the unit tests in $ordered_modules"
+              return 1
+          fi
+      fi
+  fi
+  for module in $ordered_modules; do
     cd $module
     echo "  Running tests in $module"
     echo "  $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess"
-    $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess
+    $MVN clean install -fn -Pnative -Drequire.test.libhadoop -D${PROJECT_NAME}PatchProcess
     module_failed_tests=`find . -name 'TEST*.xml' | xargs $GREP  -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
     # With -fn mvn always exits with a 0 exit code.  Because of this we need to
     # find the errors instead of using the exit code.  We assume that if the build
@@ -914,6 +948,7 @@ if [[ $RESULT != 0 ]] ; then
 fi
 buildWithPatch
 checkAuthor
+(( RESULT = RESULT + $? ))
 
 if [[ $JENKINS == "true" ]] ; then
   cleanUpXml
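
The comment block in the runTests hunk above explains the ordering constraint:
hadoop-hdfs tests load libhadoop.so built by hadoop-common, so common-project
modules must build before hdfs-project modules. A minimal standalone sketch of
that partitioning step (module names are illustrative; the sketch also quotes
the [ -n ... ] operand, since an unquoted empty variable makes that test
vacuously true):

    #!/usr/bin/env bash
    # Partition a module list so hadoop-common builds before hadoop-hdfs.
    # Module names below are illustrative.
    modules="hadoop-hdfs-project/hadoop-hdfs hadoop-common-project/hadoop-common hadoop-tools/hadoop-distcp"
    hdfs_modules=""
    ordered_modules=""
    building_common=0
    for module in $modules; do
        if [[ $module == hadoop-hdfs-project* ]]; then
            hdfs_modules="$hdfs_modules $module"        # defer hdfs modules
        elif [[ $module == hadoop-common-project* ]]; then
            ordered_modules="$ordered_modules $module"  # common stays up front
            building_common=1
        else
            ordered_modules="$ordered_modules $module"
        fi
    done
    # Quoting matters here: [ -n $hdfs_modules ] with an unquoted, empty
    # variable is always true; [ -n "$hdfs_modules" ] is not.
    if [ -n "$hdfs_modules" ]; then
        ordered_modules="$ordered_modules $hdfs_modules"
    fi
    echo "build order:$ordered_modules"

When building_common stays 0, the patched script additionally prebuilds
hadoop-common with mvn compile -Pnative, as in the hunk above, so the native
library exists before any hdfs test runs.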

Modified: hadoop/common/branches/HDFS-3077/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-project/pom.xml?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-project/pom.xml Thu Oct 11 06:14:26 2012
@@ -698,7 +698,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-surefire-plugin</artifactId>
-          <version>2.12</version>
+          <version>2.12.3</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
@@ -834,9 +834,9 @@
         <configuration>
           <forkMode>always</forkMode>
           <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
-          <argLine>-Xmx1024m</argLine>
+          <argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError</argLine>
           <environmentVariables>
-            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib</LD_LIBRARY_PATH>
+            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${basedir}/../../hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/</LD_LIBRARY_PATH>
             <MALLOC_ARENA_MAX>4</MALLOC_ARENA_MAX>
           </environmentVariables>
           <systemPropertyVariables>
@@ -854,6 +854,7 @@
             <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
             <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
             <java.security.egd>file:///dev/urandom</java.security.egd>
+            <require.test.libhadoop>${require.test.libhadoop}</require.test.libhadoop>
           </systemPropertyVariables>
           <includes>
             <include>**/Test*.java</include>
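
The pom changes work together: surefire forks each test JVM, the extended
LD_LIBRARY_PATH lets those JVMs find libhadoop.so built under hadoop-common,
and the new <systemPropertyVariables> entry forwards a -Drequire.test.libhadoop
flag from the Maven command line into each forked JVM, where tests can, for
example, treat a missing native library as a failure rather than a silent
skip. A hedged invocation sketch (PROJECT_NAME=Hadoop is an assumption about
the test-patch.sh environment):

    # Run tests the way the patched test-patch.sh does: -Pnative builds
    # libhadoop.so, and -Drequire.test.libhadoop is forwarded to the forked
    # test JVMs via the <systemPropertyVariables> block above.
    PROJECT_NAME=Hadoop   # assumed value of the test-patch.sh variable
    mvn clean install -fn -Pnative -Drequire.test.libhadoop \
        -D${PROJECT_NAME}PatchProcess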

Modified: hadoop/common/branches/HDFS-3077/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java?rev=1396918&r1=1396917&r2=1396918&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java Thu Oct 11 06:14:26 2012
@@ -21,8 +21,11 @@ package org.apache.hadoop.tools;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Cluster;
+import org.apache.hadoop.mapreduce.JobSubmissionFiles;
 import org.apache.hadoop.tools.util.TestDistCpUtils;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -30,6 +33,8 @@ import org.junit.Test;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
 
 public class TestIntegration {
   private static final Log LOG = LogFactory.getLog(TestIntegration.class);
@@ -317,6 +322,58 @@ public class TestIntegration {
       TestDistCpUtils.delete(fs, root);
     }
   }
+  
+  @Test
+  public void testDeleteMissingInDestination() {
+    
+    try {
+      addEntries(listFile, "srcdir");
+      createFiles("srcdir/file1", "dstdir/file1", "dstdir/file2");
+      
+      Path target = new Path(root + "/dstdir");
+      runTest(listFile, target, true, true, false);
+      
+      checkResult(target, 1, "file1");
+    } catch (IOException e) {
+      LOG.error("Exception encountered while running distcp", e);
+      Assert.fail("distcp failure");
+    } finally {
+      TestDistCpUtils.delete(fs, root);
+      TestDistCpUtils.delete(fs, "target/tmp1");
+    }
+  }
+  
+  @Test
+  public void testOverwrite() {
+    byte[] contents1 = "contents1".getBytes();
+    byte[] contents2 = "contents2".getBytes();
+    Assert.assertEquals(contents1.length, contents2.length);
+    
+    try {
+      addEntries(listFile, "srcdir");
+      createWithContents("srcdir/file1", contents1);
+      createWithContents("dstdir/file1", contents2);
+      
+      Path target = new Path(root + "/dstdir");
+      runTest(listFile, target, false, false, true);
+      
+      checkResult(target, 1, "file1");
+      
+      // make sure dstdir/file1 has been overwritten with the contents
+      // of srcdir/file1
+      FSDataInputStream is = fs.open(new Path(root + "/dstdir/file1"));
+      byte[] dstContents = new byte[contents1.length];
+      is.readFully(dstContents);
+      is.close();
+      Assert.assertArrayEquals(contents1, dstContents);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while running distcp", e);
+      Assert.fail("distcp failure");
+    } finally {
+      TestDistCpUtils.delete(fs, root);
+      TestDistCpUtils.delete(fs, "target/tmp1");
+    }
+  }
 
   @Test
   public void testGlobTargetMissingSingleLevel() {
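
The two new tests drive DistCpOptions switches that correspond to distcp's
command-line flags: setSyncFolder maps to -update, setDeleteMissing to
-delete, and setOverwrite to -overwrite. For reference, the CLI equivalents
(paths are illustrative):

    # Sync srcdir into dstdir, deleting dst-only files such as dstdir/file2:
    hadoop distcp -update -delete hdfs://nn:8020/srcdir hdfs://nn:8020/dstdir
    # Copy unconditionally, overwriting dstdir/file1 even if it looks up to date:
    hadoop distcp -overwrite hdfs://nn:8020/srcdir hdfs://nn:8020/dstdir
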
@@ -410,7 +467,33 @@ public class TestIntegration {
       TestDistCpUtils.delete(fs, "target/tmp1");
     }
   }
+  
+  @Test
+  public void testCleanup() {
+    try {
+      Path sourcePath = new Path("noscheme:///file");
+      List<Path> sources = new ArrayList<Path>();
+      sources.add(sourcePath);
+
+      DistCpOptions options = new DistCpOptions(sources, target);
+
+      Configuration conf = getConf();
+      Path stagingDir = JobSubmissionFiles.getStagingDir(
+              new Cluster(conf), conf);
+      stagingDir.getFileSystem(conf).mkdirs(stagingDir);
 
+      try {
+        new DistCp(conf, options).execute();
+      } catch (Throwable t) {
+        Assert.assertEquals(stagingDir.getFileSystem(conf).
+            listStatus(stagingDir).length, 0);
+      }
+    } catch (Exception e) {
+      LOG.error("Exception encountered ", e);
+      Assert.fail("testCleanup failed " + e.getMessage());
+    }
+  }
+  
   private void addEntries(Path listFile, String... entries) throws IOException {
     OutputStream out = fs.create(listFile);
     try {
@@ -434,16 +517,32 @@ public class TestIntegration {
       }
     }
   }
+  
+  private void createWithContents(String entry, byte[] contents) throws IOException {
+    OutputStream out = fs.create(new Path(root + "/" + entry));
+    try {
+      out.write(contents);
+    } finally {
+      out.close();
+    }
+  }
 
   private void mkdirs(String... entries) throws IOException {
     for (String entry : entries){
       fs.mkdirs(new Path(entry));
     }
   }
-
+    
   private void runTest(Path listFile, Path target, boolean sync) throws IOException {
+    runTest(listFile, target, sync, false, false);
+  }
+  
+  private void runTest(Path listFile, Path target, boolean sync, boolean delete,
+      boolean overwrite) throws IOException {
     DistCpOptions options = new DistCpOptions(listFile, target);
     options.setSyncFolder(sync);
+    options.setDeleteMissing(delete);
+    options.setOverwrite(overwrite);
     try {
       new DistCp(getConf(), options).execute();
     } catch (Exception e) {