Posted to mapreduce-commits@hadoop.apache.org by ar...@apache.org on 2013/12/04 01:45:40 UTC

svn commit: r1547658 - in /hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/tes...

Author: arp
Date: Wed Dec  4 00:45:38 2013
New Revision: 1547658

URL: http://svn.apache.org/r1547658
Log:
Merging r1547474 through r1547657 from trunk to branch HDFS-2832
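
For context, a trunk-to-branch merge of this revision range is typically produced with a command along these lines (repository URL and working-copy path shown for illustration; the exact invocation is not recorded in the commit):

    cd hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project
    svn merge -r1547473:1547657 \
        https://svn.apache.org/repos/asf/hadoop/common/trunk/hadoop-mapreduce-project .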

Modified:
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
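
The jobclient diffs below (MAPREDUCE-5645) move the test-file writes so they happen only after the gzip codec has been handed a Configuration via ReflectionUtils.setConf and the input format has been configured; the failure with native libraries is presumably because the codec needs its Configuration before creating a compressor. A minimal standalone sketch of that ordering follows. It is illustrative only: the class name, temp directory, and sample record string are placeholders, not part of the commit.

import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;

// Illustrative sketch only -- not the committed test code.
public class CodecOrderingSketch {
  public static void main(String[] args) throws Exception {
    Configuration defaultConf = new Configuration();
    FileSystem localFs = FileSystem.getLocal(defaultConf);
    // Placeholder work directory; the real tests use their own workDir.
    Path workDir = new Path(System.getProperty("java.io.tmpdir"), "fixedlen-sketch");

    CompressionCodec gzip = new GzipCodec();
    JobConf job = new JobConf(defaultConf);
    // Hand the codec its Configuration *before* it is used to write data;
    // this is the ordering the commit enforces in the tests.
    ReflectionUtils.setConf(gzip, job);

    // Only now write a gzip-compressed file through the configured codec.
    Path file = new Path(workDir, "part1.txt.gz");
    try (OutputStream out = gzip.createOutputStream(localFs.create(file))) {
      out.write("one  two  threefour five six  seveneightnine ten  "
          .getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("Wrote " + file);
  }
}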

Propchange: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1547474-1547657

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt?rev=1547658&r1=1547657&r2=1547658&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt Wed Dec  4 00:45:38 2013
@@ -226,6 +226,9 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5631. TestJobEndNotifier.testNotifyRetries fails with Should
     have taken more than 5 seconds in jdk7 (Jonathan Eagles via jlowe)
 
+    MAPREDUCE-5645. TestFixedLengthInputFormat fails with native libs (Mit
+    Desai via jeagles)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1547474-1547657

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java?rev=1547658&r1=1547657&r2=1547658&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java Wed Dec  4 00:45:38 2013
@@ -197,17 +197,17 @@ public class TestFixedLengthInputFormat 
   public void testGzipWithTwoInputs() throws IOException {
     CompressionCodec gzip = new GzipCodec();
     localFs.delete(workDir, true);
-    // Create files with fixed length records with 5 byte long records.
-    writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip, 
-        "one  two  threefour five six  seveneightnine ten  ");
-    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
-        "ten  nine eightsevensix  five four threetwo  one  ");
     FixedLengthInputFormat format = new FixedLengthInputFormat();
     JobConf job = new JobConf(defaultConf);
     format.setRecordLength(job, 5);
     FileInputFormat.setInputPaths(job, workDir);
     ReflectionUtils.setConf(gzip, job);
     format.configure(job);
+    // Create files with fixed length records with 5 byte long records.
+    writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip, 
+        "one  two  threefour five six  seveneightnine ten  ");
+    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
+        "ten  nine eightsevensix  five four threetwo  one  ");
     InputSplit[] splits = format.getSplits(job, 100);
     assertEquals("compressed splits == 2", 2, splits.length);
     FileSplit tmp = (FileSplit) splits[0];
@@ -283,12 +283,16 @@ public class TestFixedLengthInputFormat 
       int fileSize = (totalRecords * recordLength);
       LOG.info("totalRecords=" + totalRecords + " recordLength="
           + recordLength);
+      // Create the job 
+      JobConf job = new JobConf(defaultConf);
+      if (codec != null) {
+        ReflectionUtils.setConf(codec, job);
+      }
       // Create the test file
       ArrayList<String> recordList
           = createFile(file, codec, recordLength, totalRecords);
       assertTrue(localFs.exists(file));
-      // Create the job and set the fixed length record length config property 
-      JobConf job = new JobConf(defaultConf);
+      //set the fixed length record length config property for the job
       FixedLengthInputFormat.setRecordLength(job, recordLength);
 
       int numSplits = 1;
@@ -383,8 +387,6 @@ public class TestFixedLengthInputFormat 
     if (codec != null) {
       fileName.append(".gz");
     }
-    writeFile(localFs, new Path(workDir, fileName.toString()), codec,
-        "one  two  threefour five six  seveneightnine ten");
     FixedLengthInputFormat format = new FixedLengthInputFormat();
     JobConf job = new JobConf(defaultConf);
     format.setRecordLength(job, 5);
@@ -393,6 +395,8 @@ public class TestFixedLengthInputFormat 
       ReflectionUtils.setConf(codec, job);
     }
     format.configure(job);
+    writeFile(localFs, new Path(workDir, fileName.toString()), codec,
+            "one  two  threefour five six  seveneightnine ten");
     InputSplit[] splits = format.getSplits(job, 100);
     if (codec != null) {
       assertEquals("compressed splits == 1", 1, splits.length);

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java?rev=1547658&r1=1547657&r2=1547658&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java Wed Dec  4 00:45:38 2013
@@ -225,16 +225,16 @@ public class TestFixedLengthInputFormat 
   public void testGzipWithTwoInputs() throws Exception {
     CompressionCodec gzip = new GzipCodec();
     localFs.delete(workDir, true);
-    // Create files with fixed length records with 5 byte long records.
-    writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip, 
-        "one  two  threefour five six  seveneightnine ten  ");
-    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
-        "ten  nine eightsevensix  five four threetwo  one  ");
     Job job = Job.getInstance(defaultConf);
     FixedLengthInputFormat format = new FixedLengthInputFormat();
     format.setRecordLength(job.getConfiguration(), 5);
     ReflectionUtils.setConf(gzip, job.getConfiguration());
     FileInputFormat.setInputPaths(job, workDir);
+    // Create files with fixed length records with 5 byte long records.
+    writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip, 
+        "one  two  threefour five six  seveneightnine ten  ");
+    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
+        "ten  nine eightsevensix  five four threetwo  one  ");
     List<InputSplit> splits = format.getSplits(job);
     assertEquals("compressed splits == 2", 2, splits.size());
     FileSplit tmp = (FileSplit) splits.get(0);
@@ -310,12 +310,16 @@ public class TestFixedLengthInputFormat 
       int fileSize = (totalRecords * recordLength);
       LOG.info("totalRecords=" + totalRecords + " recordLength="
           + recordLength);
+      // Create the job 
+      Job job = Job.getInstance(defaultConf);
+      if (codec != null) {
+        ReflectionUtils.setConf(codec, job.getConfiguration());
+      }
       // Create the test file
       ArrayList<String> recordList =
           createFile(file, codec, recordLength, totalRecords);
       assertTrue(localFs.exists(file));
-      // Create the job and set the fixed length record length config property 
-      Job job = Job.getInstance(defaultConf);
+      //set the fixed length record length config property for the job
       FixedLengthInputFormat.setRecordLength(job.getConfiguration(),
           recordLength);