You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@avro.apache.org by th...@apache.org on 2010/04/23 16:02:42 UTC
svn commit: r937307 - in /hadoop/avro/trunk: CHANGES.txt lang/java/build.xml
lang/java/src/test/java/org/apache/avro/mapred/TestWordCountGeneric.java
lang/java/src/test/java/org/apache/avro/mapred/TestWordCountSpecific.java
Author: thiru
Date: Fri Apr 23 14:02:42 2010
New Revision: 937307
URL: http://svn.apache.org/viewvc?rev=937307&view=rev
Log:
AVRO-521. Out of memory and other issues with JUnit tests for mapreduce
Modified:
hadoop/avro/trunk/CHANGES.txt
hadoop/avro/trunk/lang/java/build.xml
hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountGeneric.java
hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountSpecific.java
Modified: hadoop/avro/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=937307&r1=937306&r2=937307&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Fri Apr 23 14:02:42 2010
@@ -27,6 +27,8 @@ Avro 1.4.0 (unreleased)
AVRO-520. Refactor C++ validation code. (sbanacho)
+ AVRO-521. Out of memory and other issues with Junit tests for mapreduce (thiru)
+
BUG FIXES
AVRO-461. Skipping primitives in the ruby side (jmhodges)
Modified: hadoop/avro/trunk/lang/java/build.xml
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/lang/java/build.xml?rev=937307&r1=937306&r2=937307&view=diff
==============================================================================
--- hadoop/avro/trunk/lang/java/build.xml (original)
+++ hadoop/avro/trunk/lang/java/build.xml Fri Apr 23 14:02:42 2010
@@ -340,6 +340,7 @@
printsummary="withOutAndErr"
haltonfailure="no"
fork="yes" forkMode="once"
+ maxmemory="128m"
errorProperty="tests.failed" failureProperty="tests.failed">
<sysproperty key="test.count" value="${test.count}"/>
<sysproperty key="test.dir" value="@{test.dir}"/>
Modified: hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountGeneric.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountGeneric.java?rev=937307&r1=937306&r2=937307&view=diff
==============================================================================
--- hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountGeneric.java (original)
+++ hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountGeneric.java Fri Apr 23 14:02:42 2010
@@ -18,8 +18,6 @@
package org.apache.avro.mapred;
-import junit.framework.TestCase;
-
import java.io.IOException;
import java.util.StringTokenizer;
@@ -33,8 +31,9 @@ import org.apache.avro.Schema;
import org.apache.avro.util.Utf8;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericData;
+import org.junit.Test;
-public class TestWordCountGeneric extends TestCase {
+public class TestWordCountGeneric {
private static GenericRecord newWordCount(String word, int count) {
GenericRecord value = new GenericData.Record(WordCount.SCHEMA$);
@@ -75,27 +74,34 @@ public class TestWordCountGeneric extend
}
+ @Test
+ @SuppressWarnings("deprecation")
public void testJob() throws Exception {
- WordCountUtil.writeLinesFile();
-
+ String dir = System.getProperty("test.dir", ".") + "/mapred";
+ Path outputPath = new Path(dir + "/out");
JobConf job = new JobConf();
- job.setJobName("wordcount");
-
- AvroJob.setInputGeneric(job, Schema.create(Schema.Type.STRING));
- AvroJob.setOutputGeneric(job, WordCount.SCHEMA$);
-
- job.setMapperClass(MapImpl.class);
- job.setCombinerClass(ReduceImpl.class);
- job.setReducerClass(ReduceImpl.class);
-
- String dir = System.getProperty("test.dir",".")+"/mapred";
- FileInputFormat.setInputPaths(job, new Path(dir+"/in"));
- FileOutputFormat.setOutputPath(job, new Path(dir+"/out"));
- FileOutputFormat.setCompressOutput(job, true);
-
- JobClient.runJob(job);
-
- WordCountUtil.validateCountsFile();
+ try {
+ WordCountUtil.writeLinesFile();
+
+ job.setJobName("wordcount");
+
+ AvroJob.setInputGeneric(job, Schema.create(Schema.Type.STRING));
+ AvroJob.setOutputGeneric(job, WordCount.SCHEMA$);
+
+ job.setMapperClass(MapImpl.class);
+ job.setCombinerClass(ReduceImpl.class);
+ job.setReducerClass(ReduceImpl.class);
+
+ FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
+ FileOutputFormat.setOutputPath(job, outputPath);
+ FileOutputFormat.setCompressOutput(job, true);
+
+ JobClient.runJob(job);
+
+ WordCountUtil.validateCountsFile();
+ } finally {
+ outputPath.getFileSystem(job).delete(outputPath);
+ }
}
}
Modified: hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountSpecific.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountSpecific.java?rev=937307&r1=937306&r2=937307&view=diff
==============================================================================
--- hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountSpecific.java (original)
+++ hadoop/avro/trunk/lang/java/src/test/java/org/apache/avro/mapred/TestWordCountSpecific.java Fri Apr 23 14:02:42 2010
@@ -18,8 +18,6 @@
package org.apache.avro.mapred;
-import junit.framework.TestCase;
-
import java.io.IOException;
import java.util.StringTokenizer;
@@ -31,8 +29,9 @@ import org.apache.hadoop.mapred.FileOutp
import org.apache.avro.Schema;
import org.apache.avro.util.Utf8;
+import org.junit.Test;
-public class TestWordCountSpecific extends TestCase {
+public class TestWordCountSpecific {
private static WordCount newWordCount(String word, int count) {
WordCount value = new WordCount();
@@ -42,6 +41,7 @@ public class TestWordCountSpecific exten
}
public static class MapImpl extends AvroMapper<Utf8, WordCount> {
+ @Override
public void map(Utf8 text) throws IOException {
StringTokenizer tokens = new StringTokenizer(text.toString());
while (tokens.hasMoreTokens())
@@ -53,6 +53,7 @@ public class TestWordCountSpecific exten
private WordCount previous;
+ @Override
public void reduce(WordCount current) throws IOException {
if (current.equals(previous)) {
previous.count++;
@@ -63,6 +64,7 @@ public class TestWordCountSpecific exten
}
}
+ @Override
public void close() throws IOException {
if (previous != null)
collect(previous);
@@ -70,27 +72,35 @@ public class TestWordCountSpecific exten
}
+ @Test
+ @SuppressWarnings("deprecation")
public void testJob() throws Exception {
- WordCountUtil.writeLinesFile();
-
JobConf job = new JobConf();
- job.setJobName("wordcount");
-
- AvroJob.setInputSpecific(job, Schema.create(Schema.Type.STRING));
- AvroJob.setOutputSpecific(job, WordCount.SCHEMA$);
-
- job.setMapperClass(MapImpl.class);
- job.setCombinerClass(ReduceImpl.class);
- job.setReducerClass(ReduceImpl.class);
-
- String dir = System.getProperty("test.dir",".")+"/mapred";
- FileInputFormat.setInputPaths(job, new Path(dir+"/in"));
- FileOutputFormat.setOutputPath(job, new Path(dir+"/out"));
- FileOutputFormat.setCompressOutput(job, true);
-
- JobClient.runJob(job);
-
- WordCountUtil.validateCountsFile();
+ String dir = System.getProperty("test.dir", ".") + "/mapred";
+ Path outputPath = new Path(dir + "/out");
+
+ try {
+ WordCountUtil.writeLinesFile();
+
+ job.setJobName("wordcount");
+
+ AvroJob.setInputSpecific(job, Schema.create(Schema.Type.STRING));
+ AvroJob.setOutputSpecific(job, WordCount.SCHEMA$);
+
+ job.setMapperClass(MapImpl.class);
+ job.setCombinerClass(ReduceImpl.class);
+ job.setReducerClass(ReduceImpl.class);
+
+ FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
+ FileOutputFormat.setOutputPath(job, outputPath);
+ FileOutputFormat.setCompressOutput(job, true);
+
+ JobClient.runJob(job);
+
+ WordCountUtil.validateCountsFile();
+ } finally {
+ outputPath.getFileSystem(job).delete(outputPath);
+ }
}