You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@accumulo.apache.org by el...@apache.org on 2013/11/24 00:52:04 UTC

[16/30] Squashed commit of the following:

http://git-wip-us.apache.org/repos/asf/accumulo/blob/73114819/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
----------------------------------------------------------------------
diff --git a/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java b/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
index ba647e9..7239b01 100644
--- a/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
+++ b/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
@@ -20,15 +20,17 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
 import java.util.regex.Pattern;
 
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.mapreduce.InputFormatBase.AccumuloIterator;
 import org.apache.accumulo.core.client.mapreduce.InputFormatBase.AccumuloIteratorOption;
-import org.apache.accumulo.core.client.mapreduce.InputFormatBase.RangeInputSplit;
 import org.apache.accumulo.core.client.mapreduce.InputFormatBase.RegexType;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.accumulo.core.data.Key;
@@ -36,6 +38,7 @@ import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.iterators.user.WholeRowIterator;
 import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputSplit;
@@ -46,15 +49,16 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
 
 public class AccumuloInputFormatTest {
-  
+
   @After
   public void tearDown() throws Exception {}
-  
+
   /**
    * Test basic setting & getting of max versions.
    * 
@@ -68,7 +72,7 @@ public class AccumuloInputFormatTest {
     int version = AccumuloInputFormat.getMaxVersions(job.getConfiguration());
     assertEquals(1, version);
   }
-  
+
   /**
    * Test max versions with an invalid value.
    * 
@@ -80,7 +84,7 @@ public class AccumuloInputFormatTest {
     JobContext job = new JobContext(new Configuration(), new JobID());
     AccumuloInputFormat.setMaxVersions(job.getConfiguration(), 0);
   }
-  
+
   /**
    * Test no max version configured.
    */
@@ -89,7 +93,7 @@ public class AccumuloInputFormatTest {
     JobContext job = new JobContext(new Configuration(), new JobID());
     assertEquals(-1, AccumuloInputFormat.getMaxVersions(job.getConfiguration()));
   }
-  
+
   /**
    * Check that the iterator configuration is getting stored in the Job conf correctly.
    */
@@ -97,45 +101,45 @@ public class AccumuloInputFormatTest {
   @Test
   public void testSetIterator() {
     JobContext job = new JobContext(new Configuration(), new JobID());
-    
+
     AccumuloInputFormat.setIterator(job, 1, "org.apache.accumulo.core.iterators.WholeRowIterator", "WholeRow");
     Configuration conf = job.getConfiguration();
     String iterators = conf.get("AccumuloInputFormat.iterators");
     assertEquals("1:org.apache.accumulo.core.iterators.WholeRowIterator:WholeRow", iterators);
   }
-  
+
   @Test
   public void testAddIterator() {
     JobContext job = new JobContext(new Configuration(), new JobID());
-    
+
     AccumuloInputFormat.addIterator(job.getConfiguration(), new IteratorSetting(1, "WholeRow", WholeRowIterator.class));
     AccumuloInputFormat.addIterator(job.getConfiguration(), new IteratorSetting(2, "Versions", "org.apache.accumulo.core.iterators.VersioningIterator"));
     IteratorSetting iter = new IteratorSetting(3, "Count", "org.apache.accumulo.core.iterators.CountingIterator");
     iter.addOption("v1", "1");
     iter.addOption("junk", "\0omg:!\\xyzzy");
     AccumuloInputFormat.addIterator(job.getConfiguration(), iter);
-    
+
     List<AccumuloIterator> list = AccumuloInputFormat.getIterators(job.getConfiguration());
-    
+
     // Check the list size
     assertTrue(list.size() == 3);
-    
+
     // Walk the list and make sure our settings are correct
     AccumuloIterator setting = list.get(0);
     assertEquals(1, setting.getPriority());
     assertEquals("org.apache.accumulo.core.iterators.user.WholeRowIterator", setting.getIteratorClass());
     assertEquals("WholeRow", setting.getIteratorName());
-    
+
     setting = list.get(1);
     assertEquals(2, setting.getPriority());
     assertEquals("org.apache.accumulo.core.iterators.VersioningIterator", setting.getIteratorClass());
     assertEquals("Versions", setting.getIteratorName());
-    
+
     setting = list.get(2);
     assertEquals(3, setting.getPriority());
     assertEquals("org.apache.accumulo.core.iterators.CountingIterator", setting.getIteratorClass());
     assertEquals("Count", setting.getIteratorName());
-    
+
     List<AccumuloIteratorOption> iteratorOptions = AccumuloInputFormat.getIteratorOptions(job.getConfiguration());
     assertEquals(2, iteratorOptions.size());
     assertEquals("Count", iteratorOptions.get(0).getIteratorName());
@@ -145,7 +149,7 @@ public class AccumuloInputFormatTest {
     assertEquals("junk", iteratorOptions.get(1).getKey());
     assertEquals("\0omg:!\\xyzzy", iteratorOptions.get(1).getValue());
   }
-  
+
   /**
    * Test adding iterator options where the keys and values contain both the FIELD_SEPARATOR character (':') and ITERATOR_SEPARATOR (',') characters. There
    * should be no exceptions thrown when trying to parse these types of option entries.
@@ -160,16 +164,16 @@ public class AccumuloInputFormatTest {
     someSetting.addOption(key, value);
     Job job = new Job();
     AccumuloInputFormat.addIterator(job.getConfiguration(), someSetting);
-    
+
     final String rawConfigOpt = new AccumuloIteratorOption("iterator", key, value).toString();
-    
+
     assertEquals(rawConfigOpt, job.getConfiguration().get("AccumuloInputFormat.iterators.options"));
-    
+
     List<AccumuloIteratorOption> opts = AccumuloInputFormat.getIteratorOptions(job.getConfiguration());
     assertEquals(1, opts.size());
     assertEquals(opts.get(0).getKey(), key);
     assertEquals(opts.get(0).getValue(), value);
-    
+
     someSetting.addOption(key + "2", value);
     someSetting.setPriority(2);
     someSetting.setName("it2");
@@ -181,7 +185,7 @@ public class AccumuloInputFormatTest {
       assertEquals(opt.getValue(), value);
     }
   }
-  
+
   /**
    * Test getting iterator settings for multiple iterators set
    */
@@ -189,34 +193,34 @@ public class AccumuloInputFormatTest {
   @Test
   public void testGetIteratorSettings() {
     JobContext job = new JobContext(new Configuration(), new JobID());
-    
+
     AccumuloInputFormat.setIterator(job, 1, "org.apache.accumulo.core.iterators.WholeRowIterator", "WholeRow");
     AccumuloInputFormat.setIterator(job, 2, "org.apache.accumulo.core.iterators.VersioningIterator", "Versions");
     AccumuloInputFormat.setIterator(job, 3, "org.apache.accumulo.core.iterators.CountingIterator", "Count");
-    
+
     List<AccumuloIterator> list = AccumuloInputFormat.getIterators(job);
-    
+
     // Check the list size
     assertTrue(list.size() == 3);
-    
+
     // Walk the list and make sure our settings are correct
     AccumuloIterator setting = list.get(0);
     assertEquals(1, setting.getPriority());
     assertEquals("org.apache.accumulo.core.iterators.WholeRowIterator", setting.getIteratorClass());
     assertEquals("WholeRow", setting.getIteratorName());
-    
+
     setting = list.get(1);
     assertEquals(2, setting.getPriority());
     assertEquals("org.apache.accumulo.core.iterators.VersioningIterator", setting.getIteratorClass());
     assertEquals("Versions", setting.getIteratorName());
-    
+
     setting = list.get(2);
     assertEquals(3, setting.getPriority());
     assertEquals("org.apache.accumulo.core.iterators.CountingIterator", setting.getIteratorClass());
     assertEquals("Count", setting.getIteratorName());
-    
+
   }
-  
+
   /**
    * Check that the iterator options are getting stored in the Job conf correctly.
    */
@@ -225,12 +229,12 @@ public class AccumuloInputFormatTest {
   public void testSetIteratorOption() {
     JobContext job = new JobContext(new Configuration(), new JobID());
     AccumuloInputFormat.setIteratorOption(job, "someIterator", "aKey", "aValue");
-    
+
     Configuration conf = job.getConfiguration();
     String options = conf.get("AccumuloInputFormat.iterators.options");
     assertEquals(new String("someIterator:aKey:aValue"), options);
   }
-  
+
   /**
    * Test getting iterator options for multiple options set
    */
@@ -238,49 +242,49 @@ public class AccumuloInputFormatTest {
   @Test
   public void testGetIteratorOption() {
     JobContext job = new JobContext(new Configuration(), new JobID());
-    
+
     AccumuloInputFormat.setIteratorOption(job, "iterator1", "key1", "value1");
     AccumuloInputFormat.setIteratorOption(job, "iterator2", "key2", "value2");
     AccumuloInputFormat.setIteratorOption(job, "iterator3", "key3", "value3");
-    
+
     List<AccumuloIteratorOption> list = AccumuloInputFormat.getIteratorOptions(job);
-    
+
     // Check the list size
     assertEquals(3, list.size());
-    
+
     // Walk the list and make sure all the options are correct
     AccumuloIteratorOption option = list.get(0);
     assertEquals("iterator1", option.getIteratorName());
     assertEquals("key1", option.getKey());
     assertEquals("value1", option.getValue());
-    
+
     option = list.get(1);
     assertEquals("iterator2", option.getIteratorName());
     assertEquals("key2", option.getKey());
     assertEquals("value2", option.getValue());
-    
+
     option = list.get(2);
     assertEquals("iterator3", option.getIteratorName());
     assertEquals("key3", option.getKey());
     assertEquals("value3", option.getValue());
   }
-  
+
   @SuppressWarnings("deprecation")
   @Test
   public void testSetRegex() {
     JobContext job = new JobContext(new Configuration(), new JobID());
-    
+
     String regex = ">\"*%<>\'\\";
-    
+
     AccumuloInputFormat.setRegex(job, RegexType.ROW, regex);
-    
+
     assertTrue(regex.equals(AccumuloInputFormat.getRegex(job, RegexType.ROW)));
   }
-  
+
   static class TestMapper extends Mapper<Key,Value,Key,Value> {
     Key key = null;
     int count = 0;
-    
+
     @Override
     protected void map(Key k, Value v, Context context) throws IOException, InterruptedException {
       if (key != null)
@@ -291,7 +295,7 @@ public class AccumuloInputFormatTest {
       count++;
     }
   }
-  
+
   @Test
   public void testMap() throws Exception {
     MockInstance mockInstance = new MockInstance("testmapinstance");
@@ -304,20 +308,27 @@ public class AccumuloInputFormatTest {
       bw.addMutation(m);
     }
     bw.close();
-    
+
     Job job = new Job(new Configuration());
     job.setInputFormatClass(AccumuloInputFormat.class);
     job.setMapperClass(TestMapper.class);
     job.setNumReduceTasks(0);
     AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable", new Authorizations());
     AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testmapinstance");
-    
+
     AccumuloInputFormat input = new AccumuloInputFormat();
     List<InputSplit> splits = input.getSplits(job);
     assertEquals(splits.size(), 1);
-    
+
     TestMapper mapper = (TestMapper) job.getMapperClass().newInstance();
     for (InputSplit split : splits) {
+      RangeInputSplit risplit = (RangeInputSplit) split;
+      Assert.assertEquals("root", risplit.getUsername());
+      Assert.assertArrayEquals(new byte[0], risplit.getPassword());
+      Assert.assertEquals("testtable", risplit.getTable());
+      Assert.assertEquals(new Authorizations(), risplit.getAuths());
+      Assert.assertEquals("testmapinstance", risplit.getInstanceName());
+      
       TaskAttemptID id = new TaskAttemptID();
       TaskAttemptContext attempt = new TaskAttemptContext(job.getConfiguration(), id);
       RecordReader<Key,Value> reader = input.createRecordReader(split, attempt);
@@ -326,7 +337,7 @@ public class AccumuloInputFormatTest {
       mapper.run(context);
     }
   }
-  
+
   @Test
   public void testSimple() throws Exception {
     MockInstance mockInstance = new MockInstance("testmapinstance");
@@ -339,7 +350,7 @@ public class AccumuloInputFormatTest {
       bw.addMutation(m);
     }
     bw.close();
-    
+
     JobContext job = new JobContext(new Configuration(), new JobID());
     AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable2", new Authorizations());
     AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testmapinstance");
@@ -348,14 +359,14 @@ public class AccumuloInputFormatTest {
     TaskAttemptContext tac = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());
     RecordReader<Key,Value> rr = input.createRecordReader(ris, tac);
     rr.initialize(ris, tac);
-    
+
     TestMapper mapper = new TestMapper();
     Mapper<Key,Value,Key,Value>.Context context = mapper.new Context(job.getConfiguration(), tac.getTaskAttemptID(), rr, null, null, null, ris);
     while (rr.nextKeyValue()) {
       mapper.map(rr.getCurrentKey(), rr.getCurrentValue(), context);
     }
   }
-  
+
   @SuppressWarnings("deprecation")
   @Test
   public void testRegex() throws Exception {
@@ -369,7 +380,7 @@ public class AccumuloInputFormatTest {
       bw.addMutation(m);
     }
     bw.close();
-    
+
     JobContext job = new JobContext(new Configuration(), new JobID());
     AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable3", new Authorizations());
     AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testmapinstance");
@@ -380,10 +391,144 @@ public class AccumuloInputFormatTest {
     TaskAttemptContext tac = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());
     RecordReader<Key,Value> rr = input.createRecordReader(ris, tac);
     rr.initialize(ris, tac);
-    
+
     Pattern p = Pattern.compile(regex);
     while (rr.nextKeyValue()) {
-      Assert.assertTrue( p.matcher( rr.getCurrentKey().getRow().toString()).matches());
+      Assert.assertTrue(p.matcher(rr.getCurrentKey().getRow().toString()).matches());
+    }
+  }
+
+  @SuppressWarnings("deprecation")
+  @Test
+  public void testCorrectRangeInputSplits() throws Exception {
+    JobContext job = new JobContext(new Configuration(), new JobID());
+
+    String username = "user", table = "table", rowRegex = "row.*", colfRegex = "colf.*", colqRegex = "colq.*";
+    String valRegex = "val.*", instance = "instance";
+    byte[] password = "password".getBytes();
+    Authorizations auths = new Authorizations("foo");
+    Collection<Pair<Text,Text>> fetchColumns = Collections.singleton(new Pair<Text,Text>(new Text("foo"), new Text("bar")));
+    boolean isolated = true, localIters = true;
+    int maxVersions = 5;
+    Level level = Level.WARN;
+
+    Instance inst = new MockInstance(instance);
+    Connector connector = inst.getConnector(username, password);
+    connector.tableOperations().create(table);
+
+    AccumuloInputFormat.setInputInfo(job, username, password, table, auths);
+    AccumuloInputFormat.setMockInstance(job, instance);
+    AccumuloInputFormat.setRegex(job, RegexType.ROW, rowRegex);
+    AccumuloInputFormat.setRegex(job, RegexType.COLUMN_FAMILY, colfRegex);
+    AccumuloInputFormat.setRegex(job, RegexType.COLUMN_QUALIFIER, colqRegex);
+    AccumuloInputFormat.setRegex(job, RegexType.VALUE, valRegex);
+    AccumuloInputFormat.setIsolated(job, isolated);
+    AccumuloInputFormat.setLocalIterators(job, localIters);
+    AccumuloInputFormat.setMaxVersions(job, maxVersions);
+    AccumuloInputFormat.fetchColumns(job, fetchColumns);
+    AccumuloInputFormat.setLogLevel(job, level);
+    
+    AccumuloInputFormat aif = new AccumuloInputFormat();
+    
+    List<InputSplit> splits = aif.getSplits(job);
+    
+    Assert.assertEquals(1, splits.size());
+    
+    InputSplit split = splits.get(0);
+    
+    Assert.assertEquals(RangeInputSplit.class, split.getClass());
+    
+    RangeInputSplit risplit = (RangeInputSplit) split;
+    
+    Assert.assertEquals(username, risplit.getUsername());
+    Assert.assertEquals(table, risplit.getTable());
+    Assert.assertArrayEquals(password, risplit.getPassword());
+    Assert.assertEquals(auths, risplit.getAuths());
+    Assert.assertEquals(instance, risplit.getInstanceName());
+    Assert.assertEquals(rowRegex, risplit.getRowRegex());
+    Assert.assertEquals(colfRegex, risplit.getColfamRegex());
+    Assert.assertEquals(colqRegex, risplit.getColqualRegex());
+    Assert.assertEquals(valRegex, risplit.getValueRegex());
+    Assert.assertEquals(isolated, risplit.isIsolatedScan());
+    Assert.assertEquals(localIters, risplit.usesLocalIterators());
+    Assert.assertEquals(maxVersions, risplit.getMaxVersions().intValue());
+    Assert.assertEquals(fetchColumns, risplit.getFetchedColumns());
+    Assert.assertEquals(level, risplit.getLogLevel());
+  }
+
+  @Test
+  public void testPartialInputSplitDelegationToConfiguration() throws Exception {
+    MockInstance mockInstance = new MockInstance("testPartialInputSplitDelegationToConfiguration");
+    Connector c = mockInstance.getConnector("root", new byte[] {});
+    c.tableOperations().create("testtable");
+    BatchWriter bw = c.createBatchWriter("testtable", 10000L, 1000L, 4);
+    for (int i = 0; i < 100; i++) {
+      Mutation m = new Mutation(new Text(String.format("%09x", i + 1)));
+      m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes()));
+      bw.addMutation(m);
+    }
+    bw.close();
+
+    Job job = new Job(new Configuration());
+    job.setInputFormatClass(AccumuloInputFormat.class);
+    job.setMapperClass(TestMapper.class);
+    job.setNumReduceTasks(0);
+    AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable", new Authorizations());
+    AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testPartialInputSplitDelegationToConfiguration");
+
+    AccumuloInputFormat input = new AccumuloInputFormat();
+    List<InputSplit> splits = input.getSplits(job);
+    assertEquals(splits.size(), 1);
+
+    TestMapper mapper = (TestMapper) job.getMapperClass().newInstance();
+    
+    RangeInputSplit emptySplit = new RangeInputSplit();
+    
+    // Using an empty split should fall back to the information in the Job's Configuration
+    TaskAttemptID id = new TaskAttemptID();
+    TaskAttemptContext attempt = new TaskAttemptContext(job.getConfiguration(), id);
+    RecordReader<Key,Value> reader = input.createRecordReader(emptySplit, attempt);
+    Mapper<Key,Value,Key,Value>.Context context = mapper.new Context(job.getConfiguration(), id, reader, null, null, null, emptySplit);
+    reader.initialize(emptySplit, context);
+    mapper.run(context);
+  }
+
+  @Test(expected = IOException.class)
+  public void testPartialFailedInputSplitDelegationToConfiguration() throws Exception {
+    MockInstance mockInstance = new MockInstance("testPartialFailedInputSplitDelegationToConfiguration");
+    Connector c = mockInstance.getConnector("root", new byte[] {});
+    c.tableOperations().create("testtable");
+    BatchWriter bw = c.createBatchWriter("testtable", 10000L, 1000L, 4);
+    for (int i = 0; i < 100; i++) {
+      Mutation m = new Mutation(new Text(String.format("%09x", i + 1)));
+      m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes()));
+      bw.addMutation(m);
     }
+    bw.close();
+
+    Job job = new Job(new Configuration());
+    job.setInputFormatClass(AccumuloInputFormat.class);
+    job.setMapperClass(TestMapper.class);
+    job.setNumReduceTasks(0);
+    AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable", new Authorizations());
+    AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testPartialFailedInputSplitDelegationToConfiguration");
+
+    AccumuloInputFormat input = new AccumuloInputFormat();
+    List<InputSplit> splits = input.getSplits(job);
+    assertEquals(splits.size(), 1);
+
+    TestMapper mapper = (TestMapper) job.getMapperClass().newInstance();
+    
+    RangeInputSplit emptySplit = new RangeInputSplit();
+    emptySplit.setUsername("root");
+    emptySplit.setPassword("anythingelse".getBytes());
+    
+    // Using an empty split should fall back to the information in the Job's Configuration
+    TaskAttemptID id = new TaskAttemptID();
+    TaskAttemptContext attempt = new TaskAttemptContext(job.getConfiguration(), id);
+    RecordReader<Key,Value> reader = input.createRecordReader(emptySplit, attempt);
+    Mapper<Key,Value,Key,Value>.Context context = mapper.new Context(job.getConfiguration(), id, reader, null, null, null, emptySplit);
+    reader.initialize(emptySplit, context);
+    mapper.run(context);
   }
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/73114819/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
----------------------------------------------------------------------
diff --git a/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java b/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
index 0673f1b..d9f9da0 100644
--- a/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
+++ b/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
@@ -27,7 +27,6 @@ import java.util.Map.Entry;
 
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.mapreduce.InputFormatBase.RangeInputSplit;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.KeyValue;

http://git-wip-us.apache.org/repos/asf/accumulo/blob/73114819/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
----------------------------------------------------------------------
diff --git a/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java b/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
new file mode 100644
index 0000000..22fb6e1
--- /dev/null
+++ b/src/core/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
@@ -0,0 +1,100 @@
+package org.apache.accumulo.core.client.mapreduce;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.Range;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.util.Pair;
+import org.apache.hadoop.io.Text;
+import org.apache.log4j.Level;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class RangeInputSplitTest {
+
+  @Test
+  public void testSimpleWritable() throws IOException {
+    RangeInputSplit split = new RangeInputSplit(new Range(new Key("a"), new Key("b")), new String[]{"localhost"});
+    
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    DataOutputStream dos = new DataOutputStream(baos);
+    split.write(dos);
+    
+    RangeInputSplit newSplit = new RangeInputSplit();
+    
+    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
+    DataInputStream dis = new DataInputStream(bais);
+    newSplit.readFields(dis);
+    
+    Assert.assertEquals(split.getRange(), newSplit.getRange());
+    Assert.assertTrue(Arrays.equals(split.getLocations(), newSplit.getLocations()));
+  }
+
+
+
+  @Test
+  public void testAllFieldsWritable() throws IOException {
+    RangeInputSplit split = new RangeInputSplit(new Range(new Key("a"), new Key("b")), new String[]{"localhost"});
+    
+    Set<Pair<Text,Text>> fetchedColumns = new HashSet<Pair<Text,Text>>();
+    
+    fetchedColumns.add(new Pair<Text,Text>(new Text("colf1"), new Text("colq1")));
+    fetchedColumns.add(new Pair<Text,Text>(new Text("colf2"), new Text("colq2")));
+    
+    split.setAuths(new Authorizations("foo"));
+    split.setOffline(true);
+    split.setIsolatedScan(true);
+    split.setUsesLocalIterators(true);
+    split.setMaxVersions(5);
+    split.setRowRegex("row");
+    split.setColfamRegex("colf");
+    split.setColqualRegex("colq");
+    split.setValueRegex("value");
+    split.setFetchedColumns(fetchedColumns);
+    split.setPassword("password".getBytes());
+    split.setUsername("root");
+    split.setInstanceName("instance");
+    split.setMockInstance(true);
+    split.setZooKeepers("localhost");
+    split.setLogLevel(Level.WARN);
+    
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    DataOutputStream dos = new DataOutputStream(baos);
+    split.write(dos);
+    
+    RangeInputSplit newSplit = new RangeInputSplit();
+    
+    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
+    DataInputStream dis = new DataInputStream(bais);
+    newSplit.readFields(dis);
+    
+    Assert.assertEquals(split.getRange(), newSplit.getRange());
+    Assert.assertArrayEquals(split.getLocations(), newSplit.getLocations());
+    
+    Assert.assertEquals(split.getAuths(), newSplit.getAuths());
+    Assert.assertEquals(split.isOffline(), newSplit.isOffline());
+    Assert.assertEquals(split.isIsolatedScan(), newSplit.isIsolatedScan());
+    Assert.assertEquals(split.usesLocalIterators(), newSplit.usesLocalIterators());
+    Assert.assertEquals(split.getMaxVersions(), newSplit.getMaxVersions());
+    Assert.assertEquals(split.getRowRegex(), newSplit.getRowRegex());
+    Assert.assertEquals(split.getColfamRegex(), newSplit.getColfamRegex());
+    Assert.assertEquals(split.getColqualRegex(), newSplit.getColqualRegex());
+    Assert.assertEquals(split.getValueRegex(), newSplit.getValueRegex());
+    Assert.assertEquals(split.getFetchedColumns(), newSplit.getFetchedColumns());
+    Assert.assertEquals(new String(split.getPassword()), new String(newSplit.getPassword()));
+    Assert.assertEquals(split.getUsername(), newSplit.getUsername());
+    Assert.assertEquals(split.getInstanceName(), newSplit.getInstanceName());
+    Assert.assertEquals(split.isMockInstance(), newSplit.isMockInstance());
+    Assert.assertEquals(split.getZooKeepers(), newSplit.getZooKeepers());
+    Assert.assertEquals(split.getLogLevel(), newSplit.getLogLevel());
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/73114819/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
----------------------------------------------------------------------
diff --git a/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java b/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
index c31c738..af12302 100644
--- a/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
+++ b/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
@@ -30,15 +30,13 @@ import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.TableExistsException;
 import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mapreduce.InputFormatBase.RangeInputSplit;
+import org.apache.accumulo.core.client.mapreduce.RangeInputSplit;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.security.ColumnVisibility;
-import org.apache.accumulo.examples.simple.filedata.ChunkInputFormat;
-import org.apache.accumulo.examples.simple.filedata.ChunkInputStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobID;