You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@accumulo.apache.org by ec...@apache.org on 2013/11/26 16:48:14 UTC
[25/39] git commit: ACCUMULO-287 added mock instance to
AccumuloOutputFormat - merged to trunk
ACCUMULO-287 added mock instance to AccumuloOutputFormat - merged to trunk
git-svn-id: https://svn.apache.org/repos/asf/incubator/accumulo/trunk@1229605 13f79535-47bb-0310-9956-ffa450edef68
(cherry picked from commit 63545d307599b3f1c1db4884a61fe880911cef7a)
Reason: Testing
Author: Billie Rinaldi <bi...@apache.org>
Merged uncleanly; left in deprecated things removed in 1.5. Squashed in test changes because AccumuloOutputFormatTest was created by ACCUMULO-287 independently in the 1.5 and 1.4 branches.
Author: Sean Busbey <bu...@cloudera.com>
Signed-off-by: Eric Newton <er...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/792af9a1
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/792af9a1
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/792af9a1
Branch: refs/heads/1.6.0-SNAPSHOT
Commit: 792af9a19133d81edb1e49e9e51407724a53cfc3
Parents: a3264e4
Author: Billie Rinaldi <bi...@apache.org>
Authored: Tue Jan 10 15:47:22 2012 +0000
Committer: Eric Newton <er...@gmail.com>
Committed: Mon Nov 25 16:06:42 2013 -0500
----------------------------------------------------------------------
.../client/mapreduce/AccumuloOutputFormatTest.java | 17 +++++++----------
1 file changed, 7 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/792af9a1/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java
----------------------------------------------------------------------
diff --git a/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java b/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java
index 8bc2b45..94e8bf0 100644
--- a/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java
+++ b/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java
@@ -33,7 +33,7 @@ import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.accumulo.core.util.ContextFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
@@ -41,7 +41,6 @@ import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.junit.Test;
/**
@@ -94,11 +93,10 @@ public class AccumuloOutputFormatTest {
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Mutation.class);
job.setNumReduceTasks(0);
- Configuration conf = job.getConfiguration();
- AccumuloInputFormat.setInputInfo(conf, "root", "".getBytes(), "testtable1", new Authorizations());
- AccumuloInputFormat.setMockInstance(conf, "testmrinstance");
- AccumuloOutputFormat.setOutputInfo(conf, "root", "".getBytes(), false, "testtable2");
- AccumuloOutputFormat.setMockInstance(conf, "testmrinstance");
+ AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable1", new Authorizations());
+ AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testmrinstance");
+ AccumuloOutputFormat.setOutputInfo(job, "root", "".getBytes(), false, "testtable2");
+ AccumuloOutputFormat.setMockInstance(job, "testmrinstance");
AccumuloInputFormat input = new AccumuloInputFormat();
List<InputSplit> splits = input.getSplits(job);
@@ -108,11 +106,10 @@ public class AccumuloOutputFormatTest {
TestMapper mapper = (TestMapper) job.getMapperClass().newInstance();
for (InputSplit split : splits) {
- TaskAttemptID id = new TaskAttemptID();
- TaskAttemptContext tac = new TaskAttemptContext(job.getConfiguration(), id);
+ TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job);
RecordReader<Key,Value> reader = input.createRecordReader(split, tac);
RecordWriter<Text,Mutation> writer = output.getRecordWriter(tac);
- Mapper<Key,Value,Text,Mutation>.Context context = mapper.new Context(job.getConfiguration(), id, reader, writer, null, null, split);
+ Mapper<Key,Value,Text,Mutation>.Context context = ContextFactory.createMapContext(mapper, tac, reader, writer, split);
reader.initialize(split, context);
mapper.run(context);
writer.close(context);