You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by ac...@apache.org on 2010/05/20 07:44:28 UTC
svn commit: r946533 [5/7] - in /hadoop/mapreduce/branches/branch-0.21: ./
conf/ src/c++/ src/contrib/ src/contrib/block_forensics/
src/contrib/capacity-scheduler/ src/contrib/data_join/
src/contrib/dynamic-scheduler/ src/contrib/eclipse-plugin/ src/con...
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/Chain.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/Chain.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/Chain.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/Chain.java Thu May 20 05:44:18 2010
@@ -22,6 +22,8 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.Stringifier;
@@ -43,6 +45,8 @@ import org.apache.hadoop.util.Reflection
* The Chain class provides all the common functionality for the
* {@link ChainMapper} and the {@link ChainReducer} classes.
*/
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
public class Chain {
protected static final String CHAIN_MAPPER = "mapreduce.chain.mapper";
protected static final String CHAIN_REDUCER = "mapreduce.chain.reducer";
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/ChainMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/ChainMapper.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/ChainMapper.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/ChainMapper.java Thu May 20 05:44:18 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
@@ -77,6 +79,8 @@ import org.apache.hadoop.mapreduce.lib.c
* ...
* </pre>
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class ChainMapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends
Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/ChainReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/ChainReducer.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/ChainReducer.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/chain/ChainReducer.java Thu May 20 05:44:18 2010
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.mapreduce.lib.chain;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
@@ -81,6 +83,8 @@ import java.io.IOException;
* ...
* </pre>
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class ChainReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends
Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/BigDecimalSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/BigDecimalSplitter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/BigDecimalSplitter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/BigDecimalSplitter.java Thu May 20 05:44:18 2010
@@ -27,6 +27,8 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -34,6 +36,8 @@ import org.apache.hadoop.mapreduce.MRJob
/**
* Implement DBSplitter over BigDecimal values.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class BigDecimalSplitter implements DBSplitter {
private static final Log LOG = LogFactory.getLog(BigDecimalSplitter.class);
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/BooleanSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/BooleanSplitter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/BooleanSplitter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/BooleanSplitter.java Thu May 20 05:44:18 2010
@@ -23,12 +23,16 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
/**
* Implement DBSplitter over boolean values.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class BooleanSplitter implements DBSplitter {
public List<InputSplit> split(Configuration conf, ResultSet results, String colName)
throws SQLException {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBConfiguration.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBConfiguration.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBConfiguration.java Thu May 20 05:44:18 2010
@@ -22,6 +22,8 @@ import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable;
@@ -39,6 +41,8 @@ import org.apache.hadoop.mapreduce.lib.d
* @see DBInputFormat#setInput(Job, Class, String, String, String, String...)
* @see DBOutputFormat#setOutput(Job, String, String...)
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class DBConfiguration {
/** The JDBC Driver class name */
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java Thu May 20 05:44:18 2010
@@ -40,6 +40,8 @@ import org.apache.hadoop.mapreduce.MRJob
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
/**
@@ -51,6 +53,8 @@ import org.apache.hadoop.conf.Configurat
* The SQL query and input class can be specified using one of the two
* setInput methods.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class DBInputFormat<T extends DBWritable>
extends InputFormat<LongWritable, T> implements Configurable {
@@ -59,6 +63,7 @@ public class DBInputFormat<T extends DBW
/**
* A Class that does nothing, implementing DBWritable
*/
+ @InterfaceStability.Evolving
public static class NullDBWritable implements DBWritable, Writable {
@Override
public void readFields(DataInput in) throws IOException { }
@@ -73,6 +78,7 @@ public class DBInputFormat<T extends DBW
/**
* An InputSplit that spans a set of rows
*/
+ @InterfaceStability.Evolving
public static class DBInputSplit extends InputSplit implements Writable {
private long end = 0;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBOutputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBOutputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBOutputFormat.java Thu May 20 05:44:18 2010
@@ -25,6 +25,8 @@ import java.sql.SQLException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
@@ -43,6 +45,8 @@ import org.apache.hadoop.util.StringUtil
* writes <b>only the key</b> to the database with a batch SQL query.
*
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class DBOutputFormat<K extends DBWritable, V>
extends OutputFormat<K,V> {
@@ -59,6 +63,7 @@ extends OutputFormat<K,V> {
/**
* A RecordWriter that writes the reduce output to a SQL table
*/
+ @InterfaceStability.Evolving
public class DBRecordWriter
extends RecordWriter<K, V> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBRecordReader.java Thu May 20 05:44:18 2010
@@ -41,6 +41,8 @@ import org.apache.hadoop.mapreduce.JobCo
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
@@ -49,6 +51,8 @@ import org.apache.hadoop.conf.Configurat
* Emits LongWritables containing the record number as
* key and DBWritables as value.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class DBRecordReader<T extends DBWritable> extends
RecordReader<LongWritable, T> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBSplitter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBSplitter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBSplitter.java Thu May 20 05:44:18 2010
@@ -22,6 +22,8 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
@@ -33,6 +35,8 @@ import org.apache.hadoop.mapreduce.Input
* DBSplitter implementations should perform this for a data type or family
* of data types.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public interface DBSplitter {
/**
* Given a ResultSet containing one record (and already advanced to that record)
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBWritable.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBWritable.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBWritable.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DBWritable.java Thu May 20 05:44:18 2010
@@ -21,6 +21,8 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
/**
@@ -73,6 +75,8 @@ import org.apache.hadoop.io.Writable;
* }
* </pre></p>
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public interface DBWritable {
/**
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBInputFormat.java Thu May 20 05:44:18 2010
@@ -45,6 +45,8 @@ import org.apache.hadoop.mapreduce.MRJob
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
@@ -54,6 +56,8 @@ import org.apache.hadoop.conf.Configurat
* splits, it tries to generate WHERE clauses which separate the data into roughly
* equivalent shards.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class DataDrivenDBInputFormat<T extends DBWritable>
extends DBInputFormat<T> implements Configurable {
@@ -67,6 +71,7 @@ public class DataDrivenDBInputFormat<T e
/**
* An InputSplit that spans a set of rows
*/
+ @InterfaceStability.Evolving
public static class DataDrivenDBInputSplit extends DBInputFormat.DBInputSplit {
private String lowerBoundClause;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBRecordReader.java Thu May 20 05:44:18 2010
@@ -41,6 +41,8 @@ import org.apache.hadoop.mapreduce.JobCo
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
@@ -50,6 +52,8 @@ import org.apache.hadoop.conf.Configurat
* Emits LongWritables containing the record number as
* key and DBWritables as value.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class DataDrivenDBRecordReader<T extends DBWritable> extends DBRecordReader<T> {
private static final Log LOG = LogFactory.getLog(DataDrivenDBRecordReader.class);
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DateSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DateSplitter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DateSplitter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/DateSplitter.java Thu May 20 05:44:18 2010
@@ -30,6 +30,8 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -39,6 +41,8 @@ import org.apache.hadoop.mapreduce.MRJob
* Make use of logic from IntegerSplitter, since date/time are just longs
* in Java.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class DateSplitter extends IntegerSplitter {
private static final Log LOG = LogFactory.getLog(DateSplitter.class);
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java Thu May 20 05:44:18 2010
@@ -26,6 +26,8 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -33,6 +35,8 @@ import org.apache.hadoop.mapreduce.MRJob
/**
* Implement DBSplitter over floating-point values.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class FloatSplitter implements DBSplitter {
private static final Log LOG = LogFactory.getLog(FloatSplitter.class);
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/IntegerSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/IntegerSplitter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/IntegerSplitter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/IntegerSplitter.java Thu May 20 05:44:18 2010
@@ -23,6 +23,8 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -30,6 +32,8 @@ import org.apache.hadoop.mapreduce.MRJob
/**
* Implement DBSplitter over integer values.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class IntegerSplitter implements DBSplitter {
public List<InputSplit> split(Configuration conf, ResultSet results, String colName)
throws SQLException {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/MySQLDBRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/MySQLDBRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/MySQLDBRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/MySQLDBRecordReader.java Thu May 20 05:44:18 2010
@@ -23,11 +23,15 @@ import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.sql.SQLException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
/**
* A RecordReader that reads records from a MySQL table.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class MySQLDBRecordReader<T extends DBWritable> extends DBRecordReader<T> {
public MySQLDBRecordReader(DBInputFormat.DBInputSplit split,
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/MySQLDataDrivenDBRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/MySQLDataDrivenDBRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/MySQLDataDrivenDBRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/MySQLDataDrivenDBRecordReader.java Thu May 20 05:44:18 2010
@@ -23,11 +23,15 @@ import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.sql.SQLException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
/**
* A RecordReader that reads records from a MySQL table via DataDrivenDBRecordReader
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class MySQLDataDrivenDBRecordReader<T extends DBWritable>
extends DataDrivenDBRecordReader<T> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDBRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDBRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDBRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDBRecordReader.java Thu May 20 05:44:18 2010
@@ -23,6 +23,8 @@ import java.sql.Connection;
import java.sql.SQLException;
import java.lang.reflect.Method;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -30,6 +32,8 @@ import org.apache.commons.logging.LogFac
/**
* A RecordReader that reads records from an Oracle SQL table.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class OracleDBRecordReader<T extends DBWritable> extends DBRecordReader<T> {
/** Configuration key to set to a timezone string. */
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDataDrivenDBInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDataDrivenDBInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDataDrivenDBInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDataDrivenDBInputFormat.java Thu May 20 05:44:18 2010
@@ -44,12 +44,16 @@ import org.apache.hadoop.mapreduce.JobCo
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
/**
* An InputFormat that reads input data from an SQL table in an Oracle db.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class OracleDataDrivenDBInputFormat<T extends DBWritable>
extends DataDrivenDBInputFormat<T> implements Configurable {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDataDrivenDBRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDataDrivenDBRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDataDrivenDBRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDataDrivenDBRecordReader.java Thu May 20 05:44:18 2010
@@ -23,11 +23,15 @@ import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.sql.SQLException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
/**
* A RecordReader that reads records from an Oracle table via DataDrivenDBRecordReader
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class OracleDataDrivenDBRecordReader<T extends DBWritable>
extends DataDrivenDBRecordReader<T> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDateSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDateSplitter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDateSplitter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/OracleDateSplitter.java Thu May 20 05:44:18 2010
@@ -20,12 +20,17 @@ package org.apache.hadoop.mapreduce.lib.
import java.util.Date;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* Implement DBSplitter over date/time values returned by an Oracle db.
* Make use of logic from DateSplitter, since this just needs to use
* some Oracle-specific functions on the formatting end when generating
* InputSplits.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class OracleDateSplitter extends DateSplitter {
@SuppressWarnings("unchecked")
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/TextSplitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/TextSplitter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/TextSplitter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/db/TextSplitter.java Thu May 20 05:44:18 2010
@@ -28,6 +28,8 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -35,6 +37,8 @@ import org.apache.hadoop.mapreduce.MRJob
/**
* Implement DBSplitter over text strings.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class TextSplitter extends BigDecimalSplitter {
private static final Log LOG = LogFactory.getLog(TextSplitter.class);
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
/**
@@ -53,6 +55,8 @@ import org.apache.hadoop.io.Text;
* the key is never ignored.
*
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class FieldSelectionHelper {
public static Text emptyText = new Text("");
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java Thu May 20 05:44:18 2010
@@ -24,6 +24,8 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
@@ -54,6 +56,8 @@ import org.apache.hadoop.mapreduce.lib.i
* Here is an example: "4,3,0,1:6,5,1-3,7-". It specifies to use fields
* 4,3,0 and 1 for keys, and use fields 6,5,1,2,3,7 and above for values.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class FieldSelectionMapper<K, V>
extends Mapper<K, V, Text, Text> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java Thu May 20 05:44:18 2010
@@ -24,6 +24,8 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
@@ -53,6 +55,8 @@ import org.apache.hadoop.mapreduce.Reduc
* Here is an example: "4,3,0,1:6,5,1-3,7-". It specifies to use fields
* 4,3,0 and 1 for keys, and use fields 6,5,1,2,3,7 and above for values.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class FieldSelectionReducer<K, V>
extends Reducer<Text, Text, Text, Text> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java Thu May 20 05:44:18 2010
@@ -28,6 +28,8 @@ import java.util.Set;
import java.util.Iterator;
import java.util.Map;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -66,6 +68,8 @@ import org.apache.hadoop.net.NetworkTopo
*
* @see CombineFileSplit
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class CombineFileInputFormat<K, V>
extends FileInputFormat<K, V> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java Thu May 20 05:44:18 2010
@@ -24,6 +24,8 @@ import java.lang.reflect.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
/**
@@ -34,7 +36,8 @@ import org.apache.hadoop.conf.Configurat
* these data chunks from different files.
* @see CombineFileSplit
*/
-
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
static final Class [] constructorSignature = new Class []
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java Thu May 20 05:44:18 2010
@@ -22,6 +22,8 @@ import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -41,6 +43,8 @@ import org.apache.hadoop.mapreduce.Recor
* @see FileSplit
* @see CombineFileInputFormat
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class CombineFileSplit extends InputSplit implements Writable {
private Path[] paths;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java Thu May 20 05:44:18 2010
@@ -26,6 +26,8 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputFormat;
@@ -43,6 +45,8 @@ import org.apache.hadoop.util.Reflection
*
* @see MultipleInputs#addInputPath(Job, Path, Class, Class)
*/
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
public class DelegatingInputFormat<K, V> extends InputFormat<K, V> {
@SuppressWarnings("unchecked")
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingMapper.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingMapper.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingMapper.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.ReflectionUtils;
@@ -30,6 +32,8 @@ import org.apache.hadoop.util.Reflection
*
* @see MultipleInputs#addInputPath(Job, Path, Class, Class)
*/
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
public class DelegatingMapper<K1, V1, K2, V2> extends Mapper<K1, V1, K2, V2> {
private Mapper<K1, V1, K2, V2> mapper;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java Thu May 20 05:44:18 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
@@ -29,6 +31,8 @@ import org.apache.hadoop.util.Reflection
* This is a delegating RecordReader, which delegates the functionality to the
* underlying record reader in {@link TaggedInputSplit}
*/
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
public class DelegatingRecordReader<K, V> extends RecordReader<K, V> {
RecordReader<K, V> originalRR;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java Thu May 20 05:44:18 2010
@@ -24,6 +24,8 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -49,6 +51,8 @@ import org.apache.hadoop.util.StringUtil
* {@link #isSplitable(JobContext, Path)} method to ensure input-files are
* not split-up and are processed as a whole by {@link Mapper}s.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class FileInputFormat<K, V> extends InputFormat<K, V> {
public static final String COUNTER_GROUP =
"FileInputFormatCounters";
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/FileSplit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/FileSplit.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/FileSplit.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/FileSplit.java Thu May 20 05:44:18 2010
@@ -25,6 +25,8 @@ import java.io.DataOutput;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -32,6 +34,8 @@ import org.apache.hadoop.io.Writable;
/** A section of an input file. Returned by {@link
* InputFormat#getSplits(JobContext)} and passed to
* {@link InputFormat#createRecordReader(InputSplit,TaskAttemptContext)}. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class FileSplit extends InputSplit implements Writable {
private Path file;
private long start;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java Thu May 20 05:44:18 2010
@@ -21,11 +21,16 @@ import java.io.IOException;
import java.util.List;
import java.util.Iterator;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* This class wraps a list of problems with the input, so that the user
* can get a list of problems together instead of finding and fixing them one
* by one.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class InvalidInputException extends IOException {
private static final long serialVersionUID = -380668190578456802L;
private List<IOException> problems;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
@@ -32,6 +34,8 @@ import org.apache.hadoop.mapreduce.TaskA
* under the attribute name mapreduce.input.keyvaluelinerecordreader.key.value.separator. The default
* separator is the tab character ('\t').
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class KeyValueLineRecordReader extends RecordReader<Text, Text> {
public static final String KEY_VALUE_SEPERATOR =
"mapreduce.input.keyvaluelinerecordreader.key.value.separator";
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueTextInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueTextInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueTextInputFormat.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
@@ -37,6 +39,8 @@ import org.apache.hadoop.mapreduce.TaskA
* Each line is divided into key and value parts by a separator byte. If no
* such byte exists, the key will be the entire line and the value will be empty.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class KeyValueTextInputFormat extends FileInputFormat<Text, Text> {
@Override
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -46,6 +48,8 @@ import org.apache.hadoop.fs.Seekable;
/**
* Treats keys as offset in file and value as line.
*/
+@InterfaceAudience.LimitedPrivate({"MapReduce", "Pig"})
+@InterfaceStability.Evolving
public class LineRecordReader extends RecordReader<LongWritable, Text> {
private static final Log LOG = LogFactory.getLog(LineRecordReader.class);
public static final String MAX_LINE_LENGTH =
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java Thu May 20 05:44:18 2010
@@ -21,6 +21,8 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputFormat;
@@ -33,6 +35,8 @@ import org.apache.hadoop.util.Reflection
* This class supports MapReduce jobs that have multiple input paths with
* a different {@link InputFormat} and {@link Mapper} for each path
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class MultipleInputs {
public static final String DIR_FORMATS =
"mapreduce.input.multipleinputs.dir.formats";
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java Thu May 20 05:44:18 2010
@@ -22,6 +22,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -54,7 +56,8 @@ import org.apache.hadoop.util.LineReader
* i.e. (k,v) is (LongWritable, Text).
* The location hints will span the whole mapred cluster.
*/
-
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class NLineInputFormat extends FileInputFormat<LongWritable, Text> {
public static final String LINES_PER_MAP =
"mapreduce.input.lineinputformat.linespermap";
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsBinaryInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsBinaryInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsBinaryInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsBinaryInputFormat.java Thu May 20 05:44:18 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -33,6 +35,8 @@ import org.apache.hadoop.mapreduce.TaskA
* InputFormat reading keys, values from SequenceFiles in binary (raw)
* format.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class SequenceFileAsBinaryInputFormat
extends SequenceFileInputFormat<BytesWritable,BytesWritable> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextInputFormat.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
@@ -30,6 +32,8 @@ import org.apache.hadoop.mapreduce.TaskA
* SequenceFileAsTextRecordReader which converts the input keys and values
* to their String forms by calling the toString() method.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class SequenceFileAsTextInputFormat
extends SequenceFileInputFormat<Text, Text> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextRecordReader.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
@@ -32,6 +34,8 @@ import org.apache.hadoop.mapreduce.TaskA
* calling the toString() method. This class is to SequenceFileAsTextInputFormat
* what the LineRecordReader class is to TextInputFormat.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class SequenceFileAsTextRecordReader
extends RecordReader<Text, Text> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java Thu May 20 05:44:18 2010
@@ -28,6 +28,8 @@ import java.util.regex.PatternSyntaxExce
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BytesWritable;
@@ -42,6 +44,8 @@ import org.apache.hadoop.util.Reflection
* A class that allows a map/red job to work on a sample of sequence files.
* The sample is decided by the filter class set by the job.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class SequenceFileInputFilter<K, V>
extends SequenceFileInputFormat<K, V> {
public static final Log LOG = LogFactory.getLog(FileInputFormat.class);
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFormat.java Thu May 20 05:44:18 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,6 +36,8 @@ import org.apache.hadoop.mapreduce.Recor
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/** An {@link InputFormat} for {@link SequenceFile}s. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class SequenceFileInputFormat<K, V> extends FileInputFormat<K, V> {
@Override
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java Thu May 20 05:44:18 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -32,6 +34,8 @@ import org.apache.hadoop.mapreduce.Recor
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/** A {@link RecordReader} for {@link SequenceFile}s. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class SequenceFileRecordReader<K, V> extends RecordReader<K, V> {
private SequenceFile.Reader in;
private long start;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java Thu May 20 05:44:18 2010
@@ -18,6 +18,8 @@
package org.apache.hadoop.mapreduce.lib.input;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
@@ -33,6 +35,8 @@ import org.apache.hadoop.mapreduce.TaskA
/** An {@link InputFormat} for plain text files. Files are broken into lines.
* Either linefeed or carriage-return are used to signal end of line. Keys are
* the position in the file, and values are the line of text. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TextInputFormat extends FileInputFormat<LongWritable, Text> {
@Override
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java Thu May 20 05:44:18 2010
@@ -23,6 +23,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -42,7 +44,8 @@ import org.apache.hadoop.util.StringUtil
* can get into SUCCESS or FAILED state, depending
* the status of the job execution.
*/
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class ControlledJob {
// A job will be in one of the following states
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/JobControl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/JobControl.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/JobControl.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/JobControl.java Thu May 20 05:44:18 2010
@@ -25,6 +25,8 @@ import java.util.Hashtable;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob.State;
/**
@@ -44,6 +46,8 @@ import org.apache.hadoop.mapreduce.lib.j
* for stopping the thread.
*
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class JobControl implements Runnable {
// The thread can be in one of the following state
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ArrayListBackedIterator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ArrayListBackedIterator.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ArrayListBackedIterator.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ArrayListBackedIterator.java Thu May 20 05:44:18 2010
@@ -21,6 +21,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
@@ -32,6 +34,8 @@ import org.apache.hadoop.util.Reflection
* added to it, replaying them as requested.
* Prefer {@link StreamBackedIterator}.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class ArrayListBackedIterator<X extends Writable>
implements ResetableIterator<X> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableInputFormat.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.InputFormat;
@@ -30,6 +32,8 @@ import org.apache.hadoop.mapreduce.TaskA
* Refinement of InputFormat requiring implementors to provide
* ComposableRecordReader instead of RecordReader.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class ComposableInputFormat<K extends WritableComparable<?>,
V extends Writable>
extends InputFormat<K,V> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableRecordReader.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.RecordReader;
@@ -27,6 +29,8 @@ import org.apache.hadoop.mapreduce.Recor
/**
* Additional operations required of a RecordReader to participate in a join.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class ComposableRecordReader<K extends WritableComparable<?>,
V extends Writable>
extends RecordReader<K,V>
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java Thu May 20 05:44:18 2010
@@ -25,6 +25,8 @@ import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.WritableComparable;
@@ -49,6 +51,8 @@ import org.apache.hadoop.mapreduce.TaskA
* @see MultiFilterRecordReader
*/
@SuppressWarnings("unchecked")
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class CompositeInputFormat<K extends WritableComparable>
extends InputFormat<K, TupleWritable> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputSplit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputSplit.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputSplit.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputSplit.java Thu May 20 05:44:18 2010
@@ -25,6 +25,8 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.util.HashSet;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -37,6 +39,8 @@ import org.apache.hadoop.util.Reflection
* This InputSplit contains a set of child InputSplits. Any InputSplit inserted
* into this collection must have a public default constructor.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class CompositeInputSplit extends InputSplit implements Writable {
private int fill = 0;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java Thu May 20 05:44:18 2010
@@ -23,6 +23,8 @@ import java.util.ArrayList;
import java.util.Comparator;
import java.util.PriorityQueue;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
@@ -38,6 +40,8 @@ import org.apache.hadoop.util.Reflection
* A RecordReader that can effect joins of RecordReaders sharing a common key
* type and partitioning.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class CompositeRecordReader<
K extends WritableComparable<?>, // key type
V extends Writable, // accepts RecordReader<K,V> as children
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/InnerJoinRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/InnerJoinRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/InnerJoinRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/InnerJoinRecordReader.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
@@ -27,6 +29,8 @@ import org.apache.hadoop.io.WritableComp
/**
* Full inner join.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class InnerJoinRecordReader<K extends WritableComparable<?>>
extends JoinRecordReader<K> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/JoinRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/JoinRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/JoinRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/JoinRecordReader.java Thu May 20 05:44:18 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
import java.util.PriorityQueue;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
@@ -30,6 +32,8 @@ import org.apache.hadoop.util.Reflection
/**
* Base class for Composite joins returning Tuples of arbitrary Writables.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class JoinRecordReader<K extends WritableComparable<?>>
extends CompositeRecordReader<K,Writable,TupleWritable> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/MultiFilterRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/MultiFilterRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/MultiFilterRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/MultiFilterRecordReader.java Thu May 20 05:44:18 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
import java.util.PriorityQueue;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
@@ -33,6 +35,8 @@ import org.apache.hadoop.util.Reflection
* Base class for Composite join returning values derived from multiple
* sources, but generally not tuples.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class MultiFilterRecordReader<K extends WritableComparable<?>,
V extends Writable>
extends CompositeRecordReader<K,V,V> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/OuterJoinRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/OuterJoinRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/OuterJoinRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/OuterJoinRecordReader.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
@@ -27,6 +29,8 @@ import org.apache.hadoop.io.WritableComp
/**
* Full outer join.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class OuterJoinRecordReader<K extends WritableComparable<?>>
extends JoinRecordReader<K> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/OverrideRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/OverrideRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/OverrideRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/OverrideRecordReader.java Thu May 20 05:44:18 2010
@@ -22,6 +22,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.PriorityQueue;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
@@ -35,6 +37,8 @@ import org.apache.hadoop.util.Reflection
* from S3 over S2, and values from S2 over S1 for all keys
* emitted from all sources.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class OverrideRecordReader<K extends WritableComparable<?>,
V extends Writable>
extends MultiFilterRecordReader<K,V> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java Thu May 20 05:44:18 2010
@@ -32,6 +32,8 @@ import java.util.ListIterator;
import java.util.Map;
import java.util.Stack;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.InputFormat;
@@ -66,13 +68,19 @@ import org.apache.hadoop.util.Reflection
* {@link CompositeRecordReader#combine}) and include a property to map its
* value to an identifier in the parser.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class Parser {
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
public enum TType { CIF, IDENT, COMMA, LPAREN, RPAREN, QUOT, NUM, }
/**
* Tagged-union type for tokens from the join expression.
* @see Parser.TType
*/
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
public static class Token {
private TType type;
@@ -96,6 +104,8 @@ public class Parser {
}
}
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
public static class NumToken extends Token {
private double num;
public NumToken(double num) {
@@ -105,6 +115,8 @@ public class Parser {
public double getNum() { return num; }
}
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
public static class NodeToken extends Token {
private Node node;
NodeToken(Node node) {
@@ -116,6 +128,8 @@ public class Parser {
}
}
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
public static class StrToken extends Token {
private String str;
public StrToken(TType type, String str) {
@@ -175,6 +189,8 @@ public class Parser {
}
@SuppressWarnings("unchecked")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public abstract static class Node extends ComposableInputFormat {
/**
* Return the node type registered for the particular identifier.
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ResetableIterator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ResetableIterator.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ResetableIterator.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/ResetableIterator.java Thu May 20 05:44:18 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
/**
@@ -26,6 +28,8 @@ import org.apache.hadoop.io.Writable;
* added to it directly.
* Note that this does not extend {@link java.util.Iterator}.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public interface ResetableIterator<T extends Writable> {
public static class EMPTY<U extends Writable>
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/StreamBackedIterator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/StreamBackedIterator.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/StreamBackedIterator.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/StreamBackedIterator.java Thu May 20 05:44:18 2010
@@ -23,12 +23,16 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
/**
* This class provides an implementation of ResetableIterator. This
* implementation uses a byte array to store elements added to it.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class StreamBackedIterator<X extends Writable>
implements ResetableIterator<X> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java Thu May 20 05:44:18 2010
@@ -25,6 +25,8 @@ import java.util.BitSet;
import java.util.Iterator;
import java.util.NoSuchElementException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -42,6 +44,8 @@ import org.apache.hadoop.io.WritableUtil
*
* @see org.apache.hadoop.io.Writable
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TupleWritable implements Writable, Iterable<Writable> {
protected BitSet written;
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java Thu May 20 05:44:18 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
@@ -37,6 +39,8 @@ import org.apache.hadoop.util.Reflection
* provided RecordReader and keeps a store of values matching a key when
* this source is participating in a join.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class WrappedRecordReader<K extends WritableComparable<?>,
U extends Writable> extends ComposableRecordReader<K,U> {
Modified: hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/map/InverseMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/map/InverseMapper.java?rev=946533&r1=946532&r2=946533&view=diff
==============================================================================
--- hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/map/InverseMapper.java (original)
+++ hadoop/mapreduce/branches/branch-0.21/src/java/org/apache/hadoop/mapreduce/lib/map/InverseMapper.java Thu May 20 05:44:18 2010
@@ -20,9 +20,13 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.Mapper;
/** A {@link Mapper} that swaps keys and values. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class InverseMapper<K, V> extends Mapper<K,V,V,K> {
/** The inverse function. Input keys and values are swapped.*/