Posted to mapreduce-commits@hadoop.apache.org by to...@apache.org on 2010/02/17 00:26:58 UTC

svn commit: r910763 - in /hadoop/mapreduce/trunk: ./ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/

Author: tomwhite
Date: Tue Feb 16 23:26:57 2010
New Revision: 910763

URL: http://svn.apache.org/viewvc?rev=910763&view=rev
Log:
MAPREDUCE-1444. Sqoop ConnManager instances can leak Statement objects. Contributed by Aaron Kimball.

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java

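For readers skimming the diff below, the shape of the leak and of the fix is simple: each manager's execute() prepares a JDBC PreparedStatement, returns its ResultSet, and previously never closed the Statement, so repeated calls accumulated open statements. The following is only an illustrative sketch of that pattern (class and field names loosely mirror the diff; it is not the committed code): the manager remembers the last prepared Statement and closes it in release().

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

// Illustrative sketch of the fix pattern, not the committed code.
class StatementTrackingManager {
  private final Connection conn;           // assumed to be opened elsewhere
  private PreparedStatement lastStatement; // most recently prepared statement

  StatementTrackingManager(Connection conn) {
    this.conn = conn;
  }

  ResultSet execute(String sql) throws SQLException {
    release();                             // free the previous statement first
    PreparedStatement stmt = conn.prepareStatement(sql,
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    this.lastStatement = stmt;             // remember it so release() can close it
    return stmt.executeQuery();
  }

  void release() {
    if (lastStatement != null) {
      try {
        lastStatement.close();
      } catch (SQLException e) {
        // nothing useful to do beyond logging; swallow and continue
      }
      lastStatement = null;
    }
  }
}
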
Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=910763&r1=910762&r2=910763&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Tue Feb 16 23:26:57 2010
@@ -354,6 +354,9 @@
     MAPREDUCE-1398. Fix TaskLauncher to stop waiting for slots on a TIP that
     is killed / failed.
     (Amareshwari Sriramadasu via yhemanth)
+
+    MAPREDUCE-1444. Sqoop ConnManager instances can leak Statement objects.
+    (Aaron Kimball via tomwhite)
  
 Release 0.21.0 - Unreleased
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java?rev=910763&r1=910762&r2=910763&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java Tue Feb 16 23:26:57 2010
@@ -63,16 +63,21 @@
   private List<String> generatedJarFiles;
 
   public Sqoop() {
-    init();
+    this((Configuration) null);
   }
 
   public Sqoop(Configuration conf) {
-    init();
-    setConf(conf);
+    this(conf, new SqoopOptions());
   }
 
-  private void init() {
+  public Sqoop(Configuration conf, SqoopOptions opts) {
     generatedJarFiles = new ArrayList<String>();
+    if (null != conf) {
+      setConf(conf);
+    }
+
+    this.options = opts;
+    this.options.setConf(getConf());
   }
 
   public SqoopOptions getOptions() {
@@ -146,8 +151,13 @@
    * Actual main entry-point for the program
    */
   public int run(String [] args) {
-    options = new SqoopOptions();
-    options.setConf(getConf());
+    if (options.getConf() == null) {
+      // Configuration wasn't initialized until after the ToolRunner
+      // got us to this point. ToolRunner gave Sqoop itself a Conf
+      // though.
+      options.setConf(getConf());
+    }
+
     try {
       options.parse(args);
       options.validate();

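With this constructor chain, Sqoop can be configured programmatically as well as through ToolRunner. A minimal driver sketch, assuming Sqoop implements Tool (as the ToolRunner comment in run() indicates); the class name, table name, and argument handling here are placeholders for illustration only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.util.ToolRunner;

public class SqoopDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Command-line style: ToolRunner hands the Configuration to the Tool,
    // and Sqoop.run() copies it into the options if they have none yet.
    int ret = ToolRunner.run(conf, new Sqoop(conf), args);

    // Programmatic style: a pre-built SqoopOptions goes through the new
    // two-argument constructor instead of being recreated inside run().
    SqoopOptions opts = new SqoopOptions();
    opts.setTableName("employees");        // hypothetical table name
    Sqoop sqoop = new Sqoop(conf, opts);
    ret = sqoop.run(args);                 // args are still parsed into opts

    System.exit(ret);
  }
}
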
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java?rev=910763&r1=910762&r2=910763&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java Tue Feb 16 23:26:57 2010
@@ -723,6 +723,10 @@
     return tableName;
   }
 
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
   public String getExportDir() {
     return exportDir;
   }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java?rev=910763&r1=910762&r2=910763&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java Tue Feb 16 23:26:57 2010
@@ -83,7 +83,8 @@
    * If columns is null, all columns from the table are read. This is a direct
    * (non-parallelized) read of the table back to the current client.
    * The client is responsible for calling ResultSet.close() when done with the
-   * returned ResultSet object.
+   * returned ResultSet object, and for calling release() after that to free
+   * internal state.
    */
   public abstract ResultSet readTable(String tableName, String [] columns) throws SQLException;
 
@@ -144,5 +145,16 @@
       throws IOException, ExportException {
     throw new ExportException("This database does not support exports");
   }
+
+  /**
+   * If a method of this ConnManager has returned a ResultSet to you,
+   * you are responsible for calling release() after you close the
+   * ResultSet object, to free internal resources. ConnManager
+   * implementations do not guarantee the ability to have multiple
+   * returned ResultSets available concurrently. Requesting a new
+   * ResultSet from a ConnManager may cause other open ResultSets
+   * to close.
+   */
+  public abstract void release();
 }
 
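A minimal caller-side sketch of the contract described above (the ConnManager instance and the "employees" table are placeholders): close the ResultSet first, then call release(), and do not expect two ResultSets from the same manager to stay open concurrently.

import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.sqoop.manager.ConnManager;

// Hypothetical caller illustrating the release() contract.
public class ReadTableExample {
  public static void dump(ConnManager manager) throws SQLException {
    ResultSet results = manager.readTable("employees", null);
    try {
      while (results.next()) {
        System.out.println(results.getString(1));
      }
    } finally {
      try {
        results.close();                 // 1) close the ResultSet first...
      } catch (SQLException sqlE) {
        // log-and-continue, in the spirit of the managers above
      }
      manager.release();                 // 2) ...then free the manager's Statement
    }
  }
}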

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java?rev=910763&r1=910762&r2=910763&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java Tue Feb 16 23:26:57 2010
@@ -23,6 +23,7 @@
 import java.net.URISyntaxException;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.sql.Statement;
 import java.sql.SQLException;
 import java.util.ArrayList;
 
@@ -46,6 +47,8 @@
   // set to true after we warn the user that we can use direct fastpath.
   private static boolean warningPrinted = false;
 
+  private Statement lastStatement;
+
   public MySQLManager(final SqoopOptions opts) {
     super(DRIVER_CLASS, opts);
   }
@@ -70,6 +73,7 @@
       results = execute("SHOW DATABASES");
     } catch (SQLException sqlE) {
       LOG.error("Error executing statement: " + sqlE.toString());
+      release();
       return null;
     }
 
@@ -90,6 +94,8 @@
       } catch (SQLException sqlE) {
         LOG.warn("Exception closing ResultSet: " + sqlE.toString());
       }
+
+      release();
     }
   }
 
@@ -182,6 +188,9 @@
    * @return A ResultSet encapsulating the results or null on error
    */
   protected ResultSet execute(String stmt, Object... args) throws SQLException {
+    // Free any previous resources.
+    release();
+
     PreparedStatement statement = null;
     statement = this.getConnection().prepareStatement(stmt,
         ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
@@ -193,9 +202,22 @@
     }
 
     LOG.info("Executing SQL statement: " + stmt);
+    this.lastStatement = statement;
     return statement.executeQuery();
   }
 
+  public void release() {
+    if (null != this.lastStatement) {
+      try {
+        this.lastStatement.close();
+      } catch (SQLException e) {
+        LOG.warn("Exception closing executed Statement: " + e);
+      }
+
+      this.lastStatement = null;
+    }
+  }
+
   /**
    * When using a column name in a generated SQL query, how (if at all)
    * should we escape that column name? e.g., a column named "table"

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java?rev=910763&r1=910762&r2=910763&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java Tue Feb 16 23:26:57 2010
@@ -157,6 +157,30 @@
     importer.runImport(tableName, jarFile, splitCol, options.getConf());
   }
 
+  @Override
+  public ResultSet readTable(String tableName, String[] columns) throws SQLException {
+    if (columns == null) {
+      columns = getColumnNames(tableName);
+    }
+
+    StringBuilder sb = new StringBuilder();
+    sb.append("SELECT ");
+    boolean first = true;
+    for (String col : columns) {
+      if (!first) {
+        sb.append(", ");
+      }
+      sb.append(escapeColName(col));
+      first = false;
+    }
+    sb.append(" FROM ");
+    sb.append(escapeTableName(tableName));
+
+    String sqlCmd = sb.toString();
+    LOG.debug("Reading table with command: " + sqlCmd);
+    return execute(sqlCmd);
+  }
+
   /**
    * Resolve a database-specific type to the Java type that should contain it.
    * @param sqlType

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java?rev=910763&r1=910762&r2=910763&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java Tue Feb 16 23:26:57 2010
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.sql.Statement;
 import java.sql.SQLException;
 import java.util.ArrayList;
 
@@ -46,6 +47,8 @@
   // set to true after we warn the user that we can use direct fastpath.
   private static boolean warningPrinted = false;
 
+  private Statement lastStatement;
+
   public PostgresqlManager(final SqoopOptions opts) {
     super(DRIVER_CLASS, opts);
   }
@@ -108,6 +111,9 @@
    * @return A ResultSet encapsulating the results or null on error
    */
   protected ResultSet execute(String stmt, Object... args) throws SQLException {
+    // Free any previous resources used.
+    release();
+
     PreparedStatement statement = null;
     statement = this.getConnection().prepareStatement(stmt,
         ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
@@ -119,7 +125,20 @@
     }
 
     LOG.info("Executing SQL statement: " + stmt);
+    this.lastStatement = statement;
     return statement.executeQuery();
   }
+
+  public void release() {
+    if (null != this.lastStatement) {
+      try {
+        this.lastStatement.close();
+      } catch (SQLException e) {
+        LOG.warn("Exception closing executed Statement: " + e);
+      }
+
+      this.lastStatement = null;
+    }
+  }
 }
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java?rev=910763&r1=910762&r2=910763&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java Tue Feb 16 23:26:57 2010
@@ -33,6 +33,7 @@
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
+import java.sql.Statement;
 import java.sql.SQLException;
 import java.sql.Types;
 import java.util.ArrayList;
@@ -53,6 +54,7 @@
   public static final Log LOG = LogFactory.getLog(SqlManager.class.getName());
 
   protected SqoopOptions options;
+  private Statement lastStatement;
 
   /**
    * Constructs the SqlManager
@@ -81,6 +83,7 @@
       results = execute(stmt);
     } catch (SQLException sqlE) {
       LOG.error("Error executing statement: " + sqlE.toString());
+      release();
       return null;
     }
 
@@ -106,6 +109,8 @@
       } catch (SQLException sqlE) {
         LOG.warn("SQLException closing ResultSet: " + sqlE.toString());
       }
+
+      release();
     }
   }
 
@@ -126,6 +131,7 @@
       results = execute(stmt);
     } catch (SQLException sqlE) {
       LOG.error("Error executing statement: " + sqlE.toString());
+      release();
       return null;
     }
 
@@ -152,9 +158,11 @@
       try {
         results.close();
         getConnection().commit();
-      } catch (SQLException sqlE) { 
+      } catch (SQLException sqlE) {
         LOG.warn("SQLException closing ResultSet: " + sqlE.toString());
       }
+
+      release();
     }
   }
 
@@ -179,7 +187,9 @@
     sb.append(" AS ");   // needed for hsqldb; doesn't hurt anyone else.
     sb.append(escapeTableName(tableName));
 
-    return execute(sb.toString());
+    String sqlCmd = sb.toString();
+    LOG.debug("Reading table with command: " + sqlCmd);
+    return execute(sqlCmd);
   }
 
   @Override
@@ -291,6 +301,9 @@
    * @return A ResultSet encapsulating the results or null on error
    */
   protected ResultSet execute(String stmt, Object... args) throws SQLException {
+    // Release any previously-open statement.
+    release();
+
     PreparedStatement statement = null;
     statement = this.getConnection().prepareStatement(stmt,
         ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
@@ -301,6 +314,7 @@
     }
 
     LOG.info("Executing SQL statement: " + stmt);
+    this.lastStatement = statement;
     return statement.executeQuery();
   }
 
@@ -363,6 +377,7 @@
   }
 
   public void close() throws SQLException {
+    release();
   }
 
   /**
@@ -376,6 +391,7 @@
       results = execute(s);
     } catch (SQLException sqlE) {
       LOG.error("Error executing statement: " + sqlE.toString());
+      release();
       return;
     }
 
@@ -404,6 +420,8 @@
       } catch (SQLException sqlE) {
         LOG.warn("SQLException closing ResultSet: " + sqlE.toString());
       }
+
+      release();
     }
   }
 
@@ -446,4 +464,16 @@
     ExportJob exportJob = new ExportJob(context);
     exportJob.runExport();
   }
+
+  public void release() {
+    if (null != this.lastStatement) {
+      try {
+        this.lastStatement.close();
+      } catch (SQLException e) {
+        LOG.warn("Exception closing executed Statement: " + e);
+      }
+
+      this.lastStatement = null;
+    }
+  }
 }