Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2013/06/01 01:56:29 UTC

svn commit: r1488436 - in /hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/ hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apach...

Author: vinodkv
Date: Fri May 31 23:56:28 2013
New Revision: 1488436

URL: http://svn.apache.org/r1488436
Log:
MAPREDUCE-5231. Bring back a constructor in mapred's DBInputFormat.DBRecordReader for binary compatibility with 1.x mapred APIs. Contributed by Zhijie Shen.
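
For readers following the binary-compatibility work: the restored constructor lets
record readers written against the 1.x mapred API keep compiling and linking
unchanged. The sketch below is illustrative only; MyDBInputFormat and
MyDBRecordReader are made-up names for a hypothetical subclass that layers its own
reader on top of the reinstated three-argument constructor.

import java.sql.SQLException;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;
import org.apache.hadoop.mapred.lib.db.DBWritable;

// Hypothetical 1.x-style subclass; only the super(...) call matters here.
public class MyDBInputFormat<T extends DBWritable> extends DBInputFormat<T> {

  protected class MyDBRecordReader extends DBRecordReader {
    protected MyDBRecordReader(DBInputSplit split, Class<T> inputClass, JobConf job)
        throws SQLException {
      // Delegates to the three-argument constructor reinstated by MAPREDUCE-5231.
      super(split, inputClass, job);
    }
  }
}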

Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java

Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1488436&r1=1488435&r2=1488436&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Fri May 31 23:56:28 2013
@@ -255,6 +255,10 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-5275. Bring back a couple of APIs in mapreduce.security.TokenCache
     for binary compatibility with 1.x mapreduce APIs. (Mayank Bansal via vinodkv)
 
+    MAPREDUCE-5231. Bring back a constructor in mapred's
+    DBInputFormat.DBRecordReader for binary compatibility with 1.x mapred APIs.
+    (Zhijie Shen via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4974. Optimising the LineRecordReader initialize() method 

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java?rev=1488436&r1=1488435&r2=1488436&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java Fri May 31 23:56:28 2013
@@ -37,6 +37,7 @@ import org.apache.hadoop.mapreduce.Job;
 
 @InterfaceAudience.Public
 @InterfaceStability.Stable
+@SuppressWarnings("deprecation")
 public class DBInputFormat<T  extends DBWritable>
     extends org.apache.hadoop.mapreduce.lib.db.DBInputFormat<T> 
     implements InputFormat<LongWritable, T>, JobConfigurable {
@@ -49,6 +50,17 @@ public class DBInputFormat<T  extends DB
       org.apache.hadoop.mapreduce.lib.db.DBRecordReader<T>
       implements RecordReader<LongWritable, T> {
     /**
+     * The constructor is kept to be compatible with M/R 1.x
+     *
+     * @param split The InputSplit to read data for
+     * @throws SQLException
+     */
+    protected DBRecordReader(DBInputSplit split, Class<T> inputClass,
+        JobConf job) throws SQLException {
+      super(split, inputClass, job, connection, dbConf, conditions, fieldNames, tableName);
+    }
+
+    /**
      * @param split The InputSplit to read data for
      * @throws SQLException 
      */
@@ -152,7 +164,6 @@ public class DBInputFormat<T  extends DB
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
   public RecordReader<LongWritable, T> getRecordReader(InputSplit split,
       JobConf job, Reporter reporter) throws IOException {
 

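The arguments that the reinstated constructor forwards to its superclass
(connection, dbConf, conditions, fieldNames, tableName) are derived by the
enclosing DBInputFormat from the job configuration. As a reminder of where that
state originates on the 1.x API side, here is a minimal, hypothetical driver
sketch; MyRecord, the JDBC driver, URL, credentials, table and column names are
all placeholders.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;
import org.apache.hadoop.mapred.lib.db.DBWritable;

public class DBJobSetup {

  // Minimal placeholder value class; a real one would map actual columns.
  public static class MyRecord implements Writable, DBWritable {
    long id;
    public void readFields(DataInput in) throws IOException { id = in.readLong(); }
    public void write(DataOutput out) throws IOException { out.writeLong(id); }
    public void readFields(ResultSet rs) throws SQLException { id = rs.getLong(1); }
    public void write(PreparedStatement ps) throws SQLException { ps.setLong(1, id); }
  }

  public static void main(String[] args) {
    JobConf job = new JobConf();

    // Populates the DBConfiguration from which the record reader's dbConf and
    // connection are later derived.
    DBConfiguration.configureDB(job, "com.mysql.jdbc.Driver",
        "jdbc:mysql://localhost/mydb", "user", "password");

    // tableName, conditions, orderBy and fieldNames end up as the remaining
    // arguments that the reinstated constructor passes to its superclass.
    DBInputFormat.setInput(job, MyRecord.class, "employees",
        null /* conditions */, "id" /* orderBy */, "id", "name");
  }
}
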
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java?rev=1488436&r1=1488435&r2=1488436&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java Fri May 31 23:56:28 2013
@@ -32,6 +32,10 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -41,11 +45,6 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
 /**
  * A InputFormat that reads input data from an SQL table.
  * <p>
@@ -62,7 +61,7 @@ public class DBInputFormat<T extends DBW
 
   private static final Log LOG = LogFactory.getLog(DBInputFormat.class);
   
-  private String dbProductName = "DEFAULT";
+  protected String dbProductName = "DEFAULT";
 
   /**
    * A Class that does nothing, implementing DBWritable
@@ -144,15 +143,15 @@ public class DBInputFormat<T extends DBW
     }
   }
 
-  private String conditions;
+  protected String conditions;
 
-  private Connection connection;
+  protected Connection connection;
 
-  private String tableName;
+  protected String tableName;
 
-  private String[] fieldNames;
+  protected String[] fieldNames;
 
-  private DBConfiguration dbConf;
+  protected DBConfiguration dbConf;
 
   /** {@inheritDoc} */
   public void setConf(Configuration conf) {
@@ -230,7 +229,6 @@ public class DBInputFormat<T extends DBW
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
   public RecordReader<LongWritable, T> createRecordReader(InputSplit split,
       TaskAttemptContext context) throws IOException, InterruptedException {
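
The visibility changes in this file (dbProductName, conditions, connection,
tableName, fieldNames and dbConf going from private to protected) are what let the
mapred-side subclass hand that state to the superclass DBRecordReader constructor.
A side effect is that any subclass of the new-API DBInputFormat can now read the
same state; a minimal, hypothetical sketch follows (LoggingDBInputFormat is a
made-up name, not part of this commit).

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

// Hypothetical subclass; shown only to illustrate that the listed fields are
// visible to subclasses after this change.
public class LoggingDBInputFormat<T extends DBWritable> extends DBInputFormat<T> {

  @Override
  public RecordReader<LongWritable, T> createRecordReader(InputSplit split,
      TaskAttemptContext context) throws IOException, InterruptedException {
    // tableName and conditions were private before this change.
    System.out.println("Reading " + tableName
        + (conditions == null ? "" : " where " + conditions));
    return super.createRecordReader(split, context);
  }
}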