Posted to mapreduce-commits@hadoop.apache.org by je...@apache.org on 2012/07/31 23:26:08 UTC

svn commit: r1367789 - in /hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ src/test/mapred/org/apache/hadoop/mapred/

Author: jeagles
Date: Tue Jul 31 21:26:08 2012
New Revision: 1367789

URL: http://svn.apache.org/viewvc?rev=1367789&view=rev
Log:
MAPREDUCE-4234. SortValidator.java is incompatible with multi-user or parallel use (due to a /tmp file with static name) (Robert Evans via jeagles)
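
The core of the fix is replacing the fixed "/tmp/sortvalidate/recordstatschecker" output path with a per-invocation path under hadoop.tmp.dir, as the diffs below show. A minimal sketch of that pattern, assuming the 0.23-era mapred APIs (the wrapper class and helper name here are illustrative, not part of the patch):

    import java.util.UUID;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobConf;

    public class UniquePathSketch {
      // Build a per-run output path under hadoop.tmp.dir instead of a
      // fixed /tmp location, so concurrent or multi-user runs cannot
      // collide on the same directory.
      static Path uniqueOutputPath(JobConf conf) {
        Path base = new Path(conf.get("hadoop.tmp.dir", "/tmp"),
                             "sortvalidate");
        return new Path(base, UUID.randomUUID().toString());
      }
    }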

Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SortValidator.java
    hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java

Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1367789&r1=1367788&r2=1367789&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Tue Jul 31 21:26:08 2012
@@ -785,6 +785,10 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-4457. mr job invalid transition TA_TOO_MANY_FETCH_FAILURE at 
     FAILED  (Robert Evans via tgraves)
 
+    MAPREDUCE-4234. SortValidator.java is incompatible with multi-user or
+    parallel use (due to a /tmp file with static name) (Robert Evans via
+    jeagles)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SortValidator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SortValidator.java?rev=1367789&r1=1367788&r2=1367789&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SortValidator.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SortValidator.java Tue Jul 31 21:26:08 2012
@@ -33,7 +33,6 @@ import org.apache.hadoop.io.WritableComp
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapred.lib.HashPartitioner;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.fs.*;
@@ -345,7 +344,8 @@ public class SortValidator extends Confi
 
       FileInputFormat.setInputPaths(jobConf, sortInput);
       FileInputFormat.addInputPath(jobConf, sortOutput);
-      Path outputPath = new Path("/tmp/sortvalidate/recordstatschecker");
+      Path outputPath = new Path(new Path(jobConf.get("hadoop.tmp.dir", "/tmp"),
+           "sortvalidate"), UUID.randomUUID().toString());
       if (defaultfs.exists(outputPath)) {
         defaultfs.delete(outputPath, true);
       }
@@ -365,31 +365,44 @@ public class SortValidator extends Confi
       Date startTime = new Date();
       System.out.println("Job started: " + startTime);
       JobClient.runJob(jobConf);
-      Date end_time = new Date();
-      System.out.println("Job ended: " + end_time);
-      System.out.println("The job took " + 
-                         (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
-      
-      // Check to ensure that the statistics of the 
-      // framework's sort-input and sort-output match
-      SequenceFile.Reader stats = new SequenceFile.Reader(defaultfs,
-                                                          new Path(outputPath, "part-00000"), defaults);
-      IntWritable k1 = new IntWritable();
-      IntWritable k2 = new IntWritable();
-      RecordStatsWritable v1 = new RecordStatsWritable();
-      RecordStatsWritable v2 = new RecordStatsWritable();
-      if (!stats.next(k1, v1)) {
-        throw new IOException("Failed to read record #1 from reduce's output");
-      }
-      if (!stats.next(k2, v2)) {
-        throw new IOException("Failed to read record #2 from reduce's output");
-      }
-
-      if ((v1.getBytes() != v2.getBytes()) || (v1.getRecords() != v2.getRecords()) || 
-          v1.getChecksum() != v2.getChecksum()) {
-        throw new IOException("(" + 
-                              v1.getBytes() + ", " + v1.getRecords() + ", " + v1.getChecksum() + ") v/s (" +
-                              v2.getBytes() + ", " + v2.getRecords() + ", " + v2.getChecksum() + ")");
+      try {
+        Date end_time = new Date();
+        System.out.println("Job ended: " + end_time);
+        System.out.println("The job took " + 
+            (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
+
+        // Check to ensure that the statistics of the 
+        // framework's sort-input and sort-output match
+        SequenceFile.Reader stats = new SequenceFile.Reader(defaultfs,
+            new Path(outputPath, "part-00000"), defaults);
+        try {
+          IntWritable k1 = new IntWritable();
+          IntWritable k2 = new IntWritable();
+          RecordStatsWritable v1 = new RecordStatsWritable();
+          RecordStatsWritable v2 = new RecordStatsWritable();
+          if (!stats.next(k1, v1)) {
+            throw new IOException(
+                "Failed to read record #1 from reduce's output");
+          }
+          if (!stats.next(k2, v2)) {
+            throw new IOException(
+                "Failed to read record #2 from reduce's output");
+          }
+
+          if ((v1.getBytes() != v2.getBytes()) || 
+              (v1.getRecords() != v2.getRecords()) || 
+              v1.getChecksum() != v2.getChecksum()) {
+            throw new IOException("(" + 
+                v1.getBytes() + ", " + v1.getRecords() + ", " + v1.getChecksum()
+                + ") v/s (" +
+                v2.getBytes() + ", " + v2.getRecords() + ", " + v2.getChecksum()
+                + ")");
+          }
+        } finally {
+          stats.close();
+        }
+      } finally {
+        defaultfs.delete(outputPath, true);
       }
     }
 

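The second half of the change wraps the post-job validation in nested try/finally blocks so the reader is always closed and the temporary output is always removed, even when a statistics mismatch throws. A condensed sketch of that shape, assuming the same APIs (the method name and parameters are illustrative, not part of SortValidator):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;

    public class CleanupSketch {
      // Read the reducer's statistics output, then delete the per-run
      // temp directory no matter how validation exits.
      static void checkAndCleanUp(FileSystem fs, Path outputPath,
          Configuration conf) throws IOException {
        try {
          SequenceFile.Reader stats = new SequenceFile.Reader(fs,
              new Path(outputPath, "part-00000"), conf);
          try {
            // ... read and compare the two RecordStatsWritable records ...
          } finally {
            stats.close();  // always release the reader
          }
        } finally {
          fs.delete(outputPath, true);  // recursive delete of temp output
        }
      }
    }
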
Modified: hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java?rev=1367789&r1=1367788&r2=1367789&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java Tue Jul 31 21:26:08 2012
@@ -33,7 +33,6 @@ import org.apache.hadoop.io.WritableComp
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapred.lib.HashPartitioner;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.fs.*;
@@ -345,7 +344,8 @@ public class SortValidator extends Confi
 
       FileInputFormat.setInputPaths(jobConf, sortInput);
       FileInputFormat.addInputPath(jobConf, sortOutput);
-      Path outputPath = new Path("/tmp/sortvalidate/recordstatschecker");
+      Path outputPath = new Path(new Path(jobConf.get("hadoop.tmp.dir", "/tmp"),
+           "sortvalidate"), UUID.randomUUID().toString());
       if (defaultfs.exists(outputPath)) {
         defaultfs.delete(outputPath, true);
       }
@@ -365,31 +365,44 @@ public class SortValidator extends Confi
       Date startTime = new Date();
       System.out.println("Job started: " + startTime);
       JobClient.runJob(jobConf);
-      Date end_time = new Date();
-      System.out.println("Job ended: " + end_time);
-      System.out.println("The job took " + 
-                         (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
-      
-      // Check to ensure that the statistics of the 
-      // framework's sort-input and sort-output match
-      SequenceFile.Reader stats = new SequenceFile.Reader(defaultfs,
-                                                          new Path(outputPath, "part-00000"), defaults);
-      IntWritable k1 = new IntWritable();
-      IntWritable k2 = new IntWritable();
-      RecordStatsWritable v1 = new RecordStatsWritable();
-      RecordStatsWritable v2 = new RecordStatsWritable();
-      if (!stats.next(k1, v1)) {
-        throw new IOException("Failed to read record #1 from reduce's output");
-      }
-      if (!stats.next(k2, v2)) {
-        throw new IOException("Failed to read record #2 from reduce's output");
-      }
-
-      if ((v1.getBytes() != v2.getBytes()) || (v1.getRecords() != v2.getRecords()) || 
-          v1.getChecksum() != v2.getChecksum()) {
-        throw new IOException("(" + 
-                              v1.getBytes() + ", " + v1.getRecords() + ", " + v1.getChecksum() + ") v/s (" +
-                              v2.getBytes() + ", " + v2.getRecords() + ", " + v2.getChecksum() + ")");
+      try {
+        Date end_time = new Date();
+        System.out.println("Job ended: " + end_time);
+        System.out.println("The job took " + 
+            (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
+
+        // Check to ensure that the statistics of the 
+        // framework's sort-input and sort-output match
+        SequenceFile.Reader stats = new SequenceFile.Reader(defaultfs,
+            new Path(outputPath, "part-00000"), defaults);
+        try {
+          IntWritable k1 = new IntWritable();
+          IntWritable k2 = new IntWritable();
+          RecordStatsWritable v1 = new RecordStatsWritable();
+          RecordStatsWritable v2 = new RecordStatsWritable();
+          if (!stats.next(k1, v1)) {
+            throw new IOException(
+                "Failed to read record #1 from reduce's output");
+          }
+          if (!stats.next(k2, v2)) {
+            throw new IOException(
+                "Failed to read record #2 from reduce's output");
+          }
+
+          if ((v1.getBytes() != v2.getBytes()) || 
+              (v1.getRecords() != v2.getRecords()) || 
+              v1.getChecksum() != v2.getChecksum()) {
+            throw new IOException("(" + 
+                v1.getBytes() + ", " + v1.getRecords() + ", " + v1.getChecksum()
+                + ") v/s (" +
+                v2.getBytes() + ", " + v2.getRecords() + ", " + v2.getChecksum()
+                + ")");
+          }
+        } finally {
+          stats.close();
+        }
+      } finally {
+        defaultfs.delete(outputPath, true);
       }
     }
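
For context (outside this diff): SortValidator is normally run from the Hadoop test jar, e.g. via the testmapredsort driver alias with -sortInput and -sortOutput arguments. With this change, each such run writes its record-statistics output under its own UUID-named directory beneath hadoop.tmp.dir and deletes that directory when validation finishes, so parallel or multi-user runs no longer clash on a shared /tmp path.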