You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by je...@apache.org on 2013/11/17 15:29:34 UTC
svn commit: r1542733 - in /hadoop/common/trunk/hadoop-mapreduce-project: ./
hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/
hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/
Author: jeagles
Date: Sun Nov 17 14:29:33 2013
New Revision: 1542733
URL: http://svn.apache.org/r1542733
Log:
MAPREDUCE-5625. TestFixedLengthInputFormat fails in jdk7 environment (Mariappan Asokan via jeagles)
Modified:
hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1542733&r1=1542732&r2=1542733&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Sun Nov 17 14:29:33 2013
@@ -220,6 +220,9 @@ Release 2.3.0 - UNRELEASED
MAPREDUCE-5616. MR Client-AppMaster RPC max retries on socket timeout is too
high. (cnauroth)
+ MAPREDUCE-5625. TestFixedLengthInputFormat fails in jdk7 environment
+ (Mariappan Asokan via jeagles)
+
Release 2.2.1 - UNRELEASED
INCOMPATIBLE CHANGES
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java?rev=1542733&r1=1542732&r2=1542733&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java Sun Nov 17 14:29:33 2013
@@ -99,8 +99,7 @@ public class TestFixedLengthInputFormat
Path file = new Path(workDir, new String("testFormat.txt"));
createFile(file, null, 10, 10);
// Set the fixed length record length config property
- Configuration testConf = new Configuration(defaultConf);
- JobConf job = new JobConf(testConf);
+ JobConf job = new JobConf(defaultConf);
FileInputFormat.setInputPaths(job, workDir);
FixedLengthInputFormat format = new FixedLengthInputFormat();
format.configure(job);
@@ -127,8 +126,7 @@ public class TestFixedLengthInputFormat
Path file = new Path(workDir, new String("testFormat.txt"));
createFile(file, null, 10, 10);
// Set the fixed length record length config property
- Configuration testConf = new Configuration(defaultConf);
- JobConf job = new JobConf(testConf);
+ JobConf job = new JobConf(defaultConf);
FileInputFormat.setInputPaths(job, workDir);
FixedLengthInputFormat format = new FixedLengthInputFormat();
format.setRecordLength(job, 0);
@@ -156,8 +154,7 @@ public class TestFixedLengthInputFormat
Path file = new Path(workDir, new String("testFormat.txt"));
createFile(file, null, 10, 10);
// Set the fixed length record length config property
- Configuration testConf = new Configuration(defaultConf);
- JobConf job = new JobConf(testConf);
+ JobConf job = new JobConf(defaultConf);
FileInputFormat.setInputPaths(job, workDir);
FixedLengthInputFormat format = new FixedLengthInputFormat();
format.setRecordLength(job, -10);
@@ -206,8 +203,8 @@ public class TestFixedLengthInputFormat
writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
"ten nine eightsevensix five four threetwo one ");
FixedLengthInputFormat format = new FixedLengthInputFormat();
- format.setRecordLength(defaultConf, 5);
JobConf job = new JobConf(defaultConf);
+ format.setRecordLength(job, 5);
FileInputFormat.setInputPaths(job, workDir);
ReflectionUtils.setConf(gzip, job);
format.configure(job);
@@ -290,9 +287,9 @@ public class TestFixedLengthInputFormat
ArrayList<String> recordList
= createFile(file, codec, recordLength, totalRecords);
assertTrue(localFs.exists(file));
- // Set the fixed length record length config property
- Configuration testConf = new Configuration(defaultConf);
- FixedLengthInputFormat.setRecordLength(testConf, recordLength);
+ // Create the job and set the fixed length record length config property
+ JobConf job = new JobConf(defaultConf);
+ FixedLengthInputFormat.setRecordLength(job, recordLength);
int numSplits = 1;
// Arbitrarily set number of splits.
@@ -313,8 +310,7 @@ public class TestFixedLengthInputFormat
LOG.info("Number of splits set to: " + numSplits);
}
- // Create the job, and setup the input path
- JobConf job = new JobConf(testConf);
+ // Setup the input path
FileInputFormat.setInputPaths(job, workDir);
// Try splitting the file in a variety of sizes
FixedLengthInputFormat format = new FixedLengthInputFormat();
@@ -390,8 +386,8 @@ public class TestFixedLengthInputFormat
writeFile(localFs, new Path(workDir, fileName.toString()), codec,
"one two threefour five six seveneightnine ten");
FixedLengthInputFormat format = new FixedLengthInputFormat();
- format.setRecordLength(defaultConf, 5);
JobConf job = new JobConf(defaultConf);
+ format.setRecordLength(job, 5);
FileInputFormat.setInputPaths(job, workDir);
if (codec != null) {
ReflectionUtils.setConf(codec, job);
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java?rev=1542733&r1=1542732&r2=1542733&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java Sun Nov 17 14:29:33 2013
@@ -104,9 +104,8 @@ public class TestFixedLengthInputFormat
localFs.delete(workDir, true);
Path file = new Path(workDir, new String("testFormat.txt"));
createFile(file, null, 10, 10);
- // Set the fixed length record length config property
- Configuration testConf = new Configuration(defaultConf);
- Job job = Job.getInstance(testConf);
+ // Create the job and do not set fixed record length
+ Job job = Job.getInstance(defaultConf);
FileInputFormat.setInputPaths(job, workDir);
FixedLengthInputFormat format = new FixedLengthInputFormat();
List<InputSplit> splits = format.getSplits(job);
@@ -139,11 +138,10 @@ public class TestFixedLengthInputFormat
localFs.delete(workDir, true);
Path file = new Path(workDir, new String("testFormat.txt"));
createFile(file, null, 10, 10);
+ Job job = Job.getInstance(defaultConf);
// Set the fixed length record length config property
- Configuration testConf = new Configuration(defaultConf);
FixedLengthInputFormat format = new FixedLengthInputFormat();
- format.setRecordLength(testConf, 0);
- Job job = Job.getInstance(testConf);
+ format.setRecordLength(job.getConfiguration(), 0);
FileInputFormat.setInputPaths(job, workDir);
List<InputSplit> splits = format.getSplits(job);
boolean exceptionThrown = false;
@@ -177,10 +175,9 @@ public class TestFixedLengthInputFormat
Path file = new Path(workDir, new String("testFormat.txt"));
createFile(file, null, 10, 10);
// Set the fixed length record length config property
- Configuration testConf = new Configuration(defaultConf);
+ Job job = Job.getInstance(defaultConf);
FixedLengthInputFormat format = new FixedLengthInputFormat();
- format.setRecordLength(testConf, -10);
- Job job = Job.getInstance(testConf);
+ format.setRecordLength(job.getConfiguration(), -10);
FileInputFormat.setInputPaths(job, workDir);
List<InputSplit> splits = format.getSplits(job);
boolean exceptionThrown = false;
@@ -233,10 +230,10 @@ public class TestFixedLengthInputFormat
"one two threefour five six seveneightnine ten ");
writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
"ten nine eightsevensix five four threetwo one ");
- FixedLengthInputFormat format = new FixedLengthInputFormat();
- format.setRecordLength(defaultConf, 5);
- ReflectionUtils.setConf(gzip, defaultConf);
Job job = Job.getInstance(defaultConf);
+ FixedLengthInputFormat format = new FixedLengthInputFormat();
+ format.setRecordLength(job.getConfiguration(), 5);
+ ReflectionUtils.setConf(gzip, job.getConfiguration());
FileInputFormat.setInputPaths(job, workDir);
List<InputSplit> splits = format.getSplits(job);
assertEquals("compressed splits == 2", 2, splits.size());
@@ -317,9 +314,10 @@ public class TestFixedLengthInputFormat
ArrayList<String> recordList =
createFile(file, codec, recordLength, totalRecords);
assertTrue(localFs.exists(file));
- // Set the fixed length record length config property
- Configuration testConf = new Configuration(defaultConf);
- FixedLengthInputFormat.setRecordLength(testConf, recordLength);
+ // Create the job and set the fixed length record length config property
+ Job job = Job.getInstance(defaultConf);
+ FixedLengthInputFormat.setRecordLength(job.getConfiguration(),
+ recordLength);
int numSplits = 1;
// Arbitrarily set number of splits.
@@ -339,11 +337,11 @@ public class TestFixedLengthInputFormat
}
LOG.info("Number of splits set to: " + numSplits);
}
- testConf.setLong("mapreduce.input.fileinputformat.split.maxsize",
+ job.getConfiguration().setLong(
+ "mapreduce.input.fileinputformat.split.maxsize",
(long)(fileSize/numSplits));
- // Create the job, and setup the input path
- Job job = Job.getInstance(testConf);
+ // setup the input path
FileInputFormat.setInputPaths(job, workDir);
// Try splitting the file in a variety of sizes
FixedLengthInputFormat format = new FixedLengthInputFormat();
@@ -429,18 +427,18 @@ public class TestFixedLengthInputFormat
private void runPartialRecordTest(CompressionCodec codec) throws Exception {
localFs.delete(workDir, true);
+ Job job = Job.getInstance(defaultConf);
// Create a file with fixed length records with 5 byte long
// records with a partial record at the end.
StringBuilder fileName = new StringBuilder("testFormat.txt");
if (codec != null) {
fileName.append(".gz");
- ReflectionUtils.setConf(codec, defaultConf);
+ ReflectionUtils.setConf(codec, job.getConfiguration());
}
writeFile(localFs, new Path(workDir, fileName.toString()), codec,
"one two threefour five six seveneightnine ten");
FixedLengthInputFormat format = new FixedLengthInputFormat();
- format.setRecordLength(defaultConf, 5);
- Job job = Job.getInstance(defaultConf);
+ format.setRecordLength(job.getConfiguration(), 5);
FileInputFormat.setInputPaths(job, workDir);
List<InputSplit> splits = format.getSplits(job);
if (codec != null) {