Posted to commits@chukwa.apache.org by ey...@apache.org on 2009/05/09 02:32:47 UTC

svn commit: r773144 - in /hadoop/chukwa: branches/chukwa-0.1/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java trunk/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java

Author: eyang
Date: Sat May  9 00:32:47 2009
New Revision: 773144

URL: http://svn.apache.org/viewvc?rev=773144&view=rev
Log:
CHUKWA-212. Fix file descriptor leak in MDL.  (Jerome Boulon via Eric Yang)
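
The change is the usual close-in-finally pattern: the SequenceFile.Reader is
now closed in the finally block, so the underlying file descriptor is released
even when record processing or the SQL batch throws. Below is a minimal,
self-contained sketch of that pattern as applied here; the class name, method
signature, and setup are illustrative only and are not part of the commit.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;

    // Illustrative sketch, not code from the commit.
    public class ReaderCloseSketch {
      public void process(FileSystem fs, Path source, Configuration conf)
          throws IOException {
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, source, conf);
        try {
          // ... read records and batch them into the database ...
        } finally {
          // Always release the descriptor, even if reading or the SQL batch
          // fails; otherwise a long-running MDL leaks one descriptor per
          // processed file.
          try {
            reader.close();
          } catch (Exception e) {
            // nothing more to do here beyond logging in the real code
          }
        }
      }
    }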

Modified:
    hadoop/chukwa/branches/chukwa-0.1/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java

Modified: hadoop/chukwa/branches/chukwa-0.1/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.1/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java?rev=773144&r1=773143&r2=773144&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.1/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java (original)
+++ hadoop/chukwa/branches/chukwa-0.1/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java Sat May  9 00:32:47 2009
@@ -19,16 +19,13 @@
 package org.apache.hadoop.chukwa.extraction.database;
 
 import java.io.IOException;
-import java.net.URI;
 import java.net.URISyntaxException;
 import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.text.SimpleDateFormat;
-import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -171,7 +168,7 @@
 
     log.info("StreamName: " + source.getName());
 
-    SequenceFile.Reader r = new SequenceFile.Reader(fs, source, conf);
+    SequenceFile.Reader reader = new SequenceFile.Reader(fs, source, conf);
 
     try {
       // The newInstance() call is a work around for some
@@ -193,7 +190,7 @@
     ChukwaRecord record = new ChukwaRecord();
     try {
       int batch = 0;
-      while (r.next(key, record)) {
+      while (reader.next(key, record)) {
         String sqlTime = DatabaseWriter.formatTimeStamp(record.getTime());
         log.debug("Timestamp: " + record.getTime());
         log.debug("DataType: " + key.getReduceType());
@@ -437,13 +434,16 @@
           }
           if (batchMode && batch > 20000) {
             int[] updateCounts = stmt.executeBatch();
+            log.info("batchMode insert=" + updateCounts.length);
             batch = 0;
           }
         }
 
       }
+
       if (batchMode) {
         int[] updateCounts = stmt.executeBatch();
+        log.info("batchMode insert=" + updateCounts.length);
       }
     } catch (SQLException ex) {
       // handle any errors
@@ -460,6 +460,7 @@
       if (batchMode) {
         try {
           conn.commit();
+          log.info("batchMode commit done");
         } catch (SQLException ex) {
           log.error(ex, ex);
           log.error("SQLException: " + ex.getMessage());
@@ -505,6 +506,15 @@
         }
         conn = null;
       }
+      
+      if (reader != null) {
+        try {
+          reader.close();
+        } catch (Exception e) {
+          log.warn("Could not close SequenceFile.Reader:" ,e);
+        }
+        reader = null;
+      }
     }
   }
 

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java?rev=773144&r1=773143&r2=773144&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/database/MetricDataLoader.java Sat May  9 00:32:47 2009
@@ -19,16 +19,13 @@
 package org.apache.hadoop.chukwa.extraction.database;
 
 import java.io.IOException;
-import java.net.URI;
 import java.net.URISyntaxException;
 import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.text.SimpleDateFormat;
-import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -171,7 +168,7 @@
 
     log.info("StreamName: " + source.getName());
 
-    SequenceFile.Reader r = new SequenceFile.Reader(fs, source, conf);
+    SequenceFile.Reader reader = new SequenceFile.Reader(fs, source, conf);
 
     try {
       // The newInstance() call is a work around for some
@@ -193,7 +190,7 @@
     ChukwaRecord record = new ChukwaRecord();
     try {
       int batch = 0;
-      while (r.next(key, record)) {
+      while (reader.next(key, record)) {
         String sqlTime = DatabaseWriter.formatTimeStamp(record.getTime());
         log.debug("Timestamp: " + record.getTime());
         log.debug("DataType: " + key.getReduceType());
@@ -437,13 +434,16 @@
           }
           if (batchMode && batch > 20000) {
             int[] updateCounts = stmt.executeBatch();
+            log.info("batchMode insert=" + updateCounts.length);
             batch = 0;
           }
         }
 
       }
+
       if (batchMode) {
         int[] updateCounts = stmt.executeBatch();
+        log.info("batchMode insert=" + updateCounts.length);
       }
     } catch (SQLException ex) {
       // handle any errors
@@ -460,6 +460,7 @@
       if (batchMode) {
         try {
           conn.commit();
+          log.info("batchMode commit done");
         } catch (SQLException ex) {
           log.error(ex, ex);
           log.error("SQLException: " + ex.getMessage());
@@ -505,6 +506,15 @@
         }
         conn = null;
       }
+      
+      if (reader != null) {
+        try {
+          reader.close();
+        } catch (Exception e) {
+          log.warn("Could not close SequenceFile.Reader:" ,e);
+        }
+        reader = null;
+      }
     }
   }