Posted to commits@accumulo.apache.org by el...@apache.org on 2013/11/24 00:52:16 UTC

[28/30] git commit: Merge branch '1.5.1-SNAPSHOT' into 1.6.0-SNAPSHOT

Merge branch '1.5.1-SNAPSHOT' into 1.6.0-SNAPSHOT

Conflicts:
	core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/0f3f93fd
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/0f3f93fd
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/0f3f93fd

Branch: refs/heads/1.6.0-SNAPSHOT
Commit: 0f3f93fdc9f66c49bde97019f8aea9086c9f0087
Parents: 10d2679 c88d87a
Author: Josh Elser <el...@apache.org>
Authored: Sat Nov 23 14:21:36 2013 -0500
Committer: Josh Elser <el...@apache.org>
Committed: Sat Nov 23 14:21:36 2013 -0500

----------------------------------------------------------------------
 .../mapreduce/AccumuloInputFormatTest.java      | 477 +++----------------
 .../BadPasswordSplitsAccumuloInputFormat.java   |  26 +
 .../EmptySplitsAccumuloInputFormat.java         |  29 ++
 3 files changed, 131 insertions(+), 401 deletions(-)
----------------------------------------------------------------------
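
This merge carries the 1.5.1 test refactor forward into 1.6.0: the large hand-rolled
Mapper.Context stub in AccumuloInputFormatTest is dropped in favor of two small
AccumuloInputFormat subclasses (BadPasswordSplitsAccumuloInputFormat and
EmptySplitsAccumuloInputFormat) that the existing MRTester harness loads by class
name. A minimal sketch of that calling pattern, assuming MRTester and
EmptySplitsAccumuloInputFormat from this commit are on the classpath; the literal
user/instance values below are illustrative, not the exact ones used in every test:

    // Inside MRTester.run() (see the hunk below) the InputFormat is resolved by name
    // and installed on the Job:
    //   Class<? extends InputFormat> inputFormatClass =
    //       (Class<? extends InputFormat>) Class.forName(inputFormatClassName);
    //   job.setInputFormatClass(inputFormatClass);
    // so each test only has to name the subclass it wants to exercise:
    Assert.assertEquals(0, MRTester.main(new String[] {
        "testUser", "", "testtable", "testInstance",
        EmptySplitsAccumuloInputFormat.class.getCanonicalName()}));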


http://git-wip-us.apache.org/repos/asf/accumulo/blob/0f3f93fd/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
----------------------------------------------------------------------
diff --cc core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
index 6a97d67,93dba65..397b203
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
@@@ -23,7 -23,7 +23,6 @@@ import static org.junit.Assert.assertTr
  import java.io.ByteArrayOutputStream;
  import java.io.DataOutputStream;
  import java.io.IOException;
- import java.net.URI;
 -import java.util.Arrays;
  import java.util.Collection;
  import java.util.Collections;
  import java.util.List;
@@@ -53,20 -50,12 +49,8 @@@ import org.apache.hadoop.io.Text
  import org.apache.hadoop.mapreduce.InputFormat;
  import org.apache.hadoop.mapreduce.InputSplit;
  import org.apache.hadoop.mapreduce.Job;
- import org.apache.hadoop.mapreduce.JobID;
 -import org.apache.hadoop.mapreduce.JobContext;
  import org.apache.hadoop.mapreduce.Mapper;
- import org.apache.hadoop.mapreduce.Mapper.Context;
- import org.apache.hadoop.mapreduce.OutputCommitter;
- import org.apache.hadoop.mapreduce.OutputFormat;
- import org.apache.hadoop.mapreduce.Partitioner;
--import org.apache.hadoop.mapreduce.RecordReader;
- import org.apache.hadoop.mapreduce.Reducer;
--import org.apache.hadoop.mapreduce.TaskAttemptContext;
--import org.apache.hadoop.mapreduce.TaskAttemptID;
  import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
- import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
- import org.apache.hadoop.security.Credentials;
  import org.apache.hadoop.util.Tool;
  import org.apache.hadoop.util.ToolRunner;
  import org.apache.log4j.Level;
@@@ -74,21 -63,58 +58,19 @@@ import org.junit.Assert
  import org.junit.Test;
  
  public class AccumuloInputFormatTest {
-   
+ 
    private static final String PREFIX = AccumuloInputFormatTest.class.getSimpleName();
-   private static final String INSTANCE_NAME = PREFIX + "_mapreduce_instance";
-   private static final String TEST_TABLE_1 = PREFIX + "_mapreduce_table_1";
-   
+ 
    /**
 -   * Test basic setting & getting of max versions.
 -   * 
 -   * @throws IOException
 -   *           Signals that an I/O exception has occurred.
 -   */
 -  @Deprecated
 -  @Test
 -  public void testMaxVersions() throws IOException {
 -    Job job = new Job();
 -    AccumuloInputFormat.setMaxVersions(job.getConfiguration(), 1);
 -    int version = AccumuloInputFormat.getMaxVersions(job.getConfiguration());
 -    assertEquals(1, version);
 -  }
 -
 -  /**
 -   * Test max versions with an invalid value.
 -   * 
 -   * @throws IOException
 -   *           Signals that an I/O exception has occurred.
 -   */
 -  @Deprecated
 -  @Test(expected = IOException.class)
 -  public void testMaxVersionsLessThan1() throws IOException {
 -    Job job = new Job();
 -    AccumuloInputFormat.setMaxVersions(job.getConfiguration(), 0);
 -  }
 -
 -  /**
 -   * Test no max version configured.
 -   * 
 -   * @throws IOException
 -   */
 -  @Deprecated
 -  @Test
 -  public void testNoMaxVersion() throws IOException {
 -    Job job = new Job();
 -    assertEquals(-1, AccumuloInputFormat.getMaxVersions(job.getConfiguration()));
 -  }
 -
 -  /**
     * Check that the iterator configuration is getting stored in the Job conf correctly.
     * 
     * @throws IOException
     */
    @Test
    public void testSetIterator() throws IOException {
 +    @SuppressWarnings("deprecation")
      Job job = new Job();
-     
+ 
      IteratorSetting is = new IteratorSetting(1, "WholeRow", "org.apache.accumulo.core.iterators.WholeRowIterator");
      AccumuloInputFormat.addIterator(job, is);
      Configuration conf = job.getConfiguration();
@@@ -97,12 -123,11 +79,12 @@@
      String iterators = conf.get("AccumuloInputFormat.ScanOpts.Iterators");
      assertEquals(new String(Base64.encodeBase64(baos.toByteArray())), iterators);
    }
-   
+ 
    @Test
    public void testAddIterator() throws IOException {
 +    @SuppressWarnings("deprecation")
      Job job = new Job();
-     
+ 
      AccumuloInputFormat.addIterator(job, new IteratorSetting(1, "WholeRow", WholeRowIterator.class));
      AccumuloInputFormat.addIterator(job, new IteratorSetting(2, "Versions", "org.apache.accumulo.core.iterators.VersioningIterator"));
      IteratorSetting iter = new IteratorSetting(3, "Count", "org.apache.accumulo.core.iterators.CountingIterator");
@@@ -149,10 -174,9 +131,10 @@@
      String value = "comma,delimited,value";
      IteratorSetting someSetting = new IteratorSetting(1, "iterator", "Iterator.class");
      someSetting.addOption(key, value);
 +    @SuppressWarnings("deprecation")
      Job job = new Job();
      AccumuloInputFormat.addIterator(job, someSetting);
-     
+ 
      List<IteratorSetting> list = AccumuloInputFormat.getIterators(job);
      assertEquals(1, list.size());
      assertEquals(1, list.get(0).getOptions().size());
@@@ -178,9 -202,8 +160,9 @@@
     */
    @Test
    public void testGetIteratorSettings() throws IOException {
 +    @SuppressWarnings("deprecation")
      Job job = new Job();
-     
+ 
      AccumuloInputFormat.addIterator(job, new IteratorSetting(1, "WholeRow", "org.apache.accumulo.core.iterators.WholeRowIterator"));
      AccumuloInputFormat.addIterator(job, new IteratorSetting(2, "Versions", "org.apache.accumulo.core.iterators.VersioningIterator"));
      AccumuloInputFormat.addIterator(job, new IteratorSetting(3, "Count", "org.apache.accumulo.core.iterators.CountingIterator"));
@@@ -205,16 -228,15 +187,16 @@@
      assertEquals(3, setting.getPriority());
      assertEquals("org.apache.accumulo.core.iterators.CountingIterator", setting.getIteratorClass());
      assertEquals("Count", setting.getName());
-     
+ 
    }
-   
+ 
    @Test
    public void testSetRegex() throws IOException {
 +    @SuppressWarnings("deprecation")
      Job job = new Job();
-     
+ 
      String regex = ">\"*%<>\'\\";
-     
+ 
      IteratorSetting is = new IteratorSetting(50, regex, RegExFilter.class);
      RegExFilter.setRegexs(is, regex, null, null, null, false);
      AccumuloInputFormat.addIterator(job, is);
@@@ -264,17 -286,19 +246,22 @@@
        String user = args[0];
        String pass = args[1];
        String table = args[2];
 +
+       String instanceName = args[3];
+       String inputFormatClassName = args[4];
++      @SuppressWarnings({"rawtypes", "unchecked"})
+       Class<? extends InputFormat> inputFormatClass = (Class<? extends InputFormat>) Class.forName(inputFormatClassName);
+ 
 +      @SuppressWarnings("deprecation")
        Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
        job.setJarByClass(this.getClass());
-       
-       job.setInputFormatClass(AccumuloInputFormat.class);
-       
+ 
+       job.setInputFormatClass(inputFormatClass);
+ 
        AccumuloInputFormat.setConnectorInfo(job, user, new PasswordToken(pass));
        AccumuloInputFormat.setInputTableName(job, table);
-       AccumuloInputFormat.setMockInstance(job, INSTANCE_NAME);
-       
+       AccumuloInputFormat.setMockInstance(job, instanceName);
+ 
        job.setMapperClass(TestMapper.class);
        job.setMapOutputKeyClass(Key.class);
        job.setMapOutputValueClass(Value.class);
@@@ -334,21 -360,21 +323,21 @@@
      AccumuloInputFormat.setLocalIterators(job, localIters);
      AccumuloInputFormat.fetchColumns(job, fetchColumns);
      AccumuloInputFormat.setLogLevel(job, level);
-     
+ 
      AccumuloInputFormat aif = new AccumuloInputFormat();
-     
+ 
      List<InputSplit> splits = aif.getSplits(job);
-     
+ 
      Assert.assertEquals(1, splits.size());
-     
+ 
      InputSplit split = splits.get(0);
-     
+ 
      Assert.assertEquals(RangeInputSplit.class, split.getClass());
-     
+ 
      RangeInputSplit risplit = (RangeInputSplit) split;
-     
+ 
      Assert.assertEquals(username, risplit.getPrincipal());
 -    Assert.assertEquals(table, risplit.getTable());
 +    Assert.assertEquals(table, risplit.getTableName());
      Assert.assertEquals(password, risplit.getToken());
      Assert.assertEquals(auths, risplit.getAuths());
      Assert.assertEquals(instance, risplit.getInstanceName());
@@@ -357,22 -383,22 +346,7 @@@
      Assert.assertEquals(fetchColumns, risplit.getFetchedColumns());
      Assert.assertEquals(level, risplit.getLogLevel());
    }
-   
-   static class TestMapper extends Mapper<Key,Value,Key,Value> {
-     Key key = null;
-     int count = 0;
- 
-     @Override
-     protected void map(Key k, Value v, Context context) throws IOException, InterruptedException {
-       if (key != null)
-         assertEquals(key.getRow().toString(), new String(v.get()));
-       assertEquals(k.getRow(), new Text(String.format("%09x", count + 1)));
-       assertEquals(new String(v.get()), String.format("%09x", count));
-       key = new Key(k);
-       count++;
-     }
-   }
  
 -  static class TestMapper extends Mapper<Key,Value,Key,Value> {
 -    Key key = null;
 -    int count = 0;
 -
 -    @Override
 -    protected void map(Key k, Value v, Context context) throws IOException, InterruptedException {
 -      if (key != null)
 -        assertEquals(key.getRow().toString(), new String(v.get()));
 -      assertEquals(k.getRow(), new Text(String.format("%09x", count + 1)));
 -      assertEquals(new String(v.get()), String.format("%09x", count));
 -      key = new Key(k);
 -      count++;
 -    }
 -  }
 -
    @Test
    public void testPartialInputSplitDelegationToConfiguration() throws Exception {
      String user = "testPartialInputSplitUser";
@@@ -389,289 -415,13 +363,12 @@@
      }
      bw.close();
  
-     Job job = Job.getInstance();
-     job.setInputFormatClass(AccumuloInputFormat.class);
-     job.setMapperClass(TestMapper.class);
-     job.setNumReduceTasks(0);
-     AccumuloInputFormat.setConnectorInfo(job, user, password);
-     AccumuloInputFormat.setInputTableName(job, "testtable");
-     AccumuloInputFormat.setScanAuthorizations(job, new Authorizations());
-     AccumuloInputFormat.setMockInstance(job, "testPartialInputSplitDelegationToConfiguration");
- 
-     AccumuloInputFormat input = new AccumuloInputFormat();
-     List<InputSplit> splits = input.getSplits(job);
-     assertEquals(splits.size(), 1);
- 
-     TestMapper mapper = (TestMapper) job.getMapperClass().newInstance();
-     
-     RangeInputSplit emptySplit = new RangeInputSplit();
-     emptySplit.setTableName("testtable");
-     emptySplit.setTableId(c.tableOperations().tableIdMap().get("testtable"));
-     
-     // Using an empty split should fall back to the information in the Job's Configuration
-     TaskAttemptID id = new TaskAttemptID();
-     TaskAttemptContext attempt = new TaskAttemptContextImpl(job.getConfiguration(), id);
-     RecordReader<Key,Value> reader = input.createRecordReader(emptySplit, attempt);
- 
-     reader.initialize(emptySplit, attempt);
-     Context nullContext = mapper.new Context() {
- 
-       @Override
-       public InputSplit getInputSplit() {
-         return null;
-       }
- 
-       @Override
-       public boolean nextKeyValue() throws IOException, InterruptedException {
-         return false;
-       }
- 
-       @Override
-       public Key getCurrentKey() throws IOException, InterruptedException {
-         return null;
-       }
- 
-       @Override
-       public Value getCurrentValue() throws IOException, InterruptedException {
-         return null;
-       }
- 
-       @Override
-       public void write(Key key, Value value) throws IOException, InterruptedException {
-         
-       }
- 
-       @Override
-       public OutputCommitter getOutputCommitter() {
-         return null;
-       }
- 
-       @Override
-       public TaskAttemptID getTaskAttemptID() {
-         return null;
-       }
- 
-       @Override
-       public void setStatus(String msg) {
-         
-       }
- 
-       @Override
-       public String getStatus() {
-         return null;
-       }
- 
-       @Override
-       public float getProgress() {
-         return 0;
-       }
- 
-       @Override
-       public Counter getCounter(Enum<?> counterName) {
-         return null;
-       }
- 
-       @Override
-       public Counter getCounter(String groupName, String counterName) {
-         return null;
-       }
- 
-       @Override
-       public Configuration getConfiguration() {
-         return null;
-       }
- 
-       @Override
-       public Credentials getCredentials() {
-         return null;
-       }
- 
-       @Override
-       public JobID getJobID() {
-         return null;
-       }
- 
-       @Override
-       public int getNumReduceTasks() {
-         return 0;
-       }
- 
-       @Override
-       public Path getWorkingDirectory() throws IOException {
-         return null;
-       }
- 
-       @Override
-       public Class<?> getOutputKeyClass() {
-         return null;
-       }
- 
-       @Override
-       public Class<?> getOutputValueClass() {
-         return null;
-       }
- 
-       @Override
-       public Class<?> getMapOutputKeyClass() {
-         return null;
-       }
- 
-       @Override
-       public Class<?> getMapOutputValueClass() {
-         return null;
-       }
- 
-       @Override
-       public String getJobName() {
-         return null;
-       }
- 
-       @Override
-       public Class<? extends InputFormat<?,?>> getInputFormatClass() throws ClassNotFoundException {
-         return null;
-       }
- 
-       @Override
-       public Class<? extends Mapper<?,?,?,?>> getMapperClass() throws ClassNotFoundException {
-         return null;
-       }
- 
-       @Override
-       public Class<? extends Reducer<?,?,?,?>> getCombinerClass() throws ClassNotFoundException {
-         return null;
-       }
- 
-       @Override
-       public Class<? extends Reducer<?,?,?,?>> getReducerClass() throws ClassNotFoundException {
-         return null;
-       }
- 
-       @Override
-       public Class<? extends OutputFormat<?,?>> getOutputFormatClass() throws ClassNotFoundException {
-         return null;
-       }
- 
-       @Override
-       public Class<? extends Partitioner<?,?>> getPartitionerClass() throws ClassNotFoundException {
-         return null;
-       }
- 
-       @Override
-       public RawComparator<?> getSortComparator() {
-         return null;
-       }
- 
-       @Override
-       public String getJar() {
-         return null;
-       }
- 
-       @Override
-       public RawComparator<?> getGroupingComparator() {
-         return null;
-       }
- 
-       @Override
-       public boolean getJobSetupCleanupNeeded() {
-         return false;
-       }
- 
-       @Override
-       public boolean getTaskCleanupNeeded() {
-         return false;
-       }
- 
-       @Override
-       public boolean getProfileEnabled() {
-         return false;
-       }
- 
-       @Override
-       public String getProfileParams() {
-         return null;
-       }
- 
-       @Override
-       public IntegerRanges getProfileTaskRange(boolean isMap) {
-         return null;
-       }
- 
-       @Override
-       public String getUser() {
-         return null;
-       }
- 
-       @Override
-       public boolean getSymlink() {
-         return false;
-       }
- 
-       @Override
-       public Path[] getArchiveClassPaths() {
-         return null;
-       }
- 
-       @Override
-       public URI[] getCacheArchives() throws IOException {
-         return null;
-       }
- 
-       @Override
-       public URI[] getCacheFiles() throws IOException {
-         return null;
-       }
- 
-       @Override
-       public Path[] getLocalCacheArchives() throws IOException {
-         return null;
-       }
- 
-       @Override
-       public Path[] getLocalCacheFiles() throws IOException {
-         return null;
-       }
- 
-       @Override
-       public Path[] getFileClassPaths() {
-         return null;
-       }
- 
-       @Override
-       public String[] getArchiveTimestamps() {
-         return null;
-       }
- 
-       @Override
-       public String[] getFileTimestamps() {
-         return null;
-       }
- 
-       @Override
-       public int getMaxMapAttempts() {
-         return 0;
-       }
- 
-       @Override
-       public int getMaxReduceAttempts() {
-         return 0;
-       }
- 
-       @Override
-       public void progress() {
-         
-       }
- 
-     };
-     
-     while (reader.nextKeyValue()) {
-       Key key = reader.getCurrentKey();
-       Value value = reader.getCurrentValue();
-       
-       mapper.map(key, value, nullContext);
-     }
-   }
+     Assert.assertEquals(0, MRTester.main(new String[] {user, "", "testtable", "testPartialInputSplitDelegationToConfiguration",
+         EmptySplitsAccumuloInputFormat.class.getCanonicalName()}));
+     assertNull(e1);
 -    assertNull(e2);
 -  }
++    assertNull(e2);  }
  
-   @Test(expected = IOException.class)
+   @Test
    public void testPartialFailedInputSplitDelegationToConfiguration() throws Exception {
      String user = "testPartialFailedInputSplit";
      PasswordToken password = new PasswordToken("");

http://git-wip-us.apache.org/repos/asf/accumulo/blob/0f3f93fd/core/src/test/java/org/apache/accumulo/core/client/mapreduce/EmptySplitsAccumuloInputFormat.java
----------------------------------------------------------------------
diff --cc core/src/test/java/org/apache/accumulo/core/client/mapreduce/EmptySplitsAccumuloInputFormat.java
index 0000000,91db378..7130f24
mode 000000,100644..100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/EmptySplitsAccumuloInputFormat.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/EmptySplitsAccumuloInputFormat.java
@@@ -1,0 -1,21 +1,29 @@@
+ package org.apache.accumulo.core.client.mapreduce;
+ 
+ import java.io.IOException;
 -import java.util.Arrays;
++import java.util.ArrayList;
+ import java.util.List;
+ 
+ import org.apache.hadoop.mapreduce.InputSplit;
+ import org.apache.hadoop.mapreduce.JobContext;
+ 
+ /**
+  * AccumuloInputFormat which returns an "empty" RangeInputSplit
+  */
+ public class EmptySplitsAccumuloInputFormat extends AccumuloInputFormat {
+   
+   @Override
+   public List<InputSplit> getSplits(JobContext context) throws IOException {
 -    super.getSplits(context);
++    List<InputSplit> oldSplits = super.getSplits(context);
++    List<InputSplit> newSplits = new ArrayList<InputSplit>(oldSplits.size());
+     
 -    return Arrays.<InputSplit> asList(new RangeInputSplit());
++    // Copy only the necessary information
++    for (InputSplit oldSplit : oldSplits) {
++      RangeInputSplit newSplit = new RangeInputSplit((RangeInputSplit) oldSplit);
++      newSplits.add(newSplit);
++    }
++    
++    
++    return newSplits;
+   }
+ }
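
For context, EmptySplitsAccumuloInputFormat only does its job once the static
configuration lives on the Job, so the record reader has something to fall back to
when the returned splits carry only partial information. A minimal sketch of that
wiring, assuming the mock instance and empty-password credentials used by the test
above; the example class name and literal values here are illustrative and not part
of the commit:

    import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
    import org.apache.accumulo.core.client.mapreduce.EmptySplitsAccumuloInputFormat;
    import org.apache.accumulo.core.client.security.tokens.PasswordToken;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Value;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

    public class EmptySplitsExample {
      public static void main(String[] args) throws Exception {
        @SuppressWarnings("deprecation")
        Job job = new Job();  // deprecated constructor, used the same way in the test

        // Static configuration goes through the parent class, as in the diff above...
        AccumuloInputFormat.setConnectorInfo(job, "testUser", new PasswordToken(""));
        AccumuloInputFormat.setInputTableName(job, "testtable");
        AccumuloInputFormat.setMockInstance(job, "testInstance");

        // ...while the subclass is what the job actually runs, so getSplits() hands
        // back copied RangeInputSplits and the reader falls back to the Job config.
        job.setInputFormatClass(EmptySplitsAccumuloInputFormat.class);
        job.setMapOutputKeyClass(Key.class);
        job.setMapOutputValueClass(Value.class);
        job.setOutputFormatClass(NullOutputFormat.class);
      }
    }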