Posted to commits@hive.apache.org by am...@apache.org on 2011/03/22 10:54:30 UTC

svn commit: r1084119 - in /hive/trunk: contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/io/

Author: amareshwari
Date: Tue Mar 22 09:54:30 2011
New Revision: 1084119

URL: http://svn.apache.org/viewvc?rev=1084119&view=rev
Log:
HIVE-2042. Closes opened streams in error scenarios. Contributed by Chinna Rao Lalam
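
The idiom applied across these files is the same throughout: close the
stream on the success path (so a failing close() still propagates), null
out the reference, and let Hadoop's IOUtils.closeStream in the finally
block clean up quietly on the error path. A minimal sketch of the idiom
follows; the class, method, and variable names are illustrative only and
are not part of the commit.

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.IOUtils;

    public class CloseOnErrorIdiom {
      public static byte[] encode(String s) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = null;
        try {
          dos = new DataOutputStream(baos);
          dos.writeUTF(s);
          dos.close();  // success path: a failing close() still propagates
          dos = null;   // already closed; make the finally block a no-op
        } finally {
          IOUtils.closeStream(dos); // error path: close quietly, so the
                                    // original exception is the one thrown
        }
        return baos.toByteArray();
      }
    }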

Modified:
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileInputFormatChecker.java

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java?rev=1084119&r1=1084118&r2=1084119&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java Tue Mar 22 09:54:30 2011
@@ -32,6 +32,7 @@ import org.apache.hadoop.io.ArrayWritabl
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.MapWritable;
@@ -229,12 +230,18 @@ public class TypedBytesWritableOutput {
   }
 
   public void writeWritable(Writable w) throws IOException {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    DataOutputStream dos = new DataOutputStream(baos);
-    WritableUtils.writeString(dos, w.getClass().getName());
-    w.write(dos);
-    dos.close();
-    out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
+    DataOutputStream dos = null;
+    try {
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      dos = new DataOutputStream(baos);
+      WritableUtils.writeString(dos, w.getClass().getName());
+      w.write(dos);
+      out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
+      dos.close();
+      dos = null;
+    } finally {
+      IOUtils.closeStream(dos);
+    }
   }
 
   public void writeEndOfRecord() throws IOException {
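
(Only the DataOutputStream wrapper is tracked here; closing the underlying
ByteArrayOutputStream is documented as a no-op, so it needs no guard of
its own.)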

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1084119&r1=1084118&r2=1084119&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Tue Mar 22 09:54:30 2011
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.DriverC
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
 
 
@@ -55,10 +56,11 @@ public class ExplainTask extends Task<Ex
   @Override
   public int execute(DriverContext driverContext) {
 
+    PrintStream out = null;
     try {
       Path resFile = new Path(work.getResFile());
       OutputStream outS = resFile.getFileSystem(conf).create(resFile);
-      PrintStream out = new PrintStream(outS);
+      out = new PrintStream(outS);
 
       // Print out the parse AST
       outputAST(work.getAstStringTree(), out, 0);
@@ -70,12 +72,15 @@ public class ExplainTask extends Task<Ex
       // Go over all the tasks and dump out the plans
       outputStagePlans(out, work.getRootTasks(), 0);
       out.close();
+      out = null;
 
       return (0);
     } catch (Exception e) {
       console.printError("Failed with exception " + e.getMessage(), "\n"
           + StringUtils.stringifyException(e));
       return (1);
+    } finally {
+      IOUtils.closeStream(out);
     }
   }
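
(The PrintStream is hoisted above the try block so the finally clause can
reach it; since PrintStream.close() also closes the stream it wraps, the
underlying FileSystem output stream outS is released through the same
path on both the success and error branches.)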
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java?rev=1084119&r1=1084118&r2=1084119&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java Tue Mar 22 09:54:30 2011
@@ -24,6 +24,7 @@ import java.net.URL;
 import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.JobConf;
 
 /**
@@ -62,9 +63,15 @@ public final class Throttle {
         // read in the first 1K characters from the URL
         URL url = new URL(tracker);
         LOG.debug("Throttle: URL " + tracker);
-        InputStream in = url.openStream();
-        in.read(buffer);
-        in.close();
+        InputStream in = null;
+        try {
+          in = url.openStream();
+          in.read(buffer);
+          in.close();
+          in = null;
+        } finally {
+          IOUtils.closeStream(in);
+        }
         String fetchString = new String(buffer);
 
         // fetch the xml tag <dogc>xxx</dogc>

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1084119&r1=1084118&r2=1084119&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Tue Mar 22 09:54:30 2011
@@ -115,6 +115,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.Serializer;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
@@ -457,10 +458,16 @@ public final class Utilities {
    * Deserialize the whole query plan.
    */
   public static QueryPlan deserializeQueryPlan(InputStream in, Configuration conf) {
-    XMLDecoder d = new XMLDecoder(in, null, null, conf.getClassLoader());
-    QueryPlan ret = (QueryPlan) d.readObject();
-    d.close();
-    return (ret);
+    XMLDecoder d = null;
+    try {
+      d = new XMLDecoder(in, null, null, conf.getClassLoader());
+      QueryPlan ret = (QueryPlan) d.readObject();
+      return (ret);
+    } finally {
+      if (null != d) {
+        d.close();
+      }
+    }
   }
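
(The XMLEncoder/XMLDecoder methods use an explicit null-checked close()
rather than IOUtils.closeStream: the helper takes a java.io.Closeable,
which those java.beans classes do not implement; they gained AutoCloseable
only in Java 7.)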
 
   /**
@@ -468,19 +475,32 @@ public final class Utilities {
    * output since it closes the output stream. DO USE mapredWork.toXML() instead.
    */
   public static void serializeMapRedWork(MapredWork w, OutputStream out) {
-    XMLEncoder e = new XMLEncoder(out);
-    // workaround for java 1.5
-    e.setPersistenceDelegate(ExpressionTypes.class, new EnumDelegate());
-    e.setPersistenceDelegate(GroupByDesc.Mode.class, new EnumDelegate());
-    e.writeObject(w);
-    e.close();
+    XMLEncoder e = null;
+    try {
+      e = new XMLEncoder(out);
+      // workaround for java 1.5
+      e.setPersistenceDelegate(ExpressionTypes.class, new EnumDelegate());
+      e.setPersistenceDelegate(GroupByDesc.Mode.class, new EnumDelegate());
+      e.writeObject(w);
+    } finally {
+      if (null != e) {
+        e.close();
+      }
+    }
+
   }
 
   public static MapredWork deserializeMapRedWork(InputStream in, Configuration conf) {
-    XMLDecoder d = new XMLDecoder(in, null, null, conf.getClassLoader());
-    MapredWork ret = (MapredWork) d.readObject();
-    d.close();
-    return (ret);
+    XMLDecoder d = null;
+    try {
+      d = new XMLDecoder(in, null, null, conf.getClassLoader());
+      MapredWork ret = (MapredWork) d.readObject();
+      return (ret);
+    } finally {
+      if (null != d) {
+        d.close();
+      }
+    }
   }
 
   /**
@@ -488,19 +508,31 @@ public final class Utilities {
    * output since it closes the output stream. DO USE mapredWork.toXML() instead.
    */
   public static void serializeMapRedLocalWork(MapredLocalWork w, OutputStream out) {
-    XMLEncoder e = new XMLEncoder(out);
-    // workaround for java 1.5
-    e.setPersistenceDelegate(ExpressionTypes.class, new EnumDelegate());
-    e.setPersistenceDelegate(GroupByDesc.Mode.class, new EnumDelegate());
-    e.writeObject(w);
-    e.close();
+    XMLEncoder e = null;
+    try {
+      e = new XMLEncoder(out);
+      // workaround for java 1.5
+      e.setPersistenceDelegate(ExpressionTypes.class, new EnumDelegate());
+      e.setPersistenceDelegate(GroupByDesc.Mode.class, new EnumDelegate());
+      e.writeObject(w);
+    } finally {
+      if (null != e) {
+        e.close();
+      }
+    }
   }
 
   public static MapredLocalWork deserializeMapRedLocalWork(InputStream in, Configuration conf) {
-    XMLDecoder d = new XMLDecoder(in, null, null, conf.getClassLoader());
-    MapredLocalWork ret = (MapredLocalWork) d.readObject();
-    d.close();
-    return (ret);
+    XMLDecoder d = null;
+    try {
+      d = new XMLDecoder(in, null, null, conf.getClassLoader());
+      MapredLocalWork ret = (MapredLocalWork) d.readObject();
+      return (ret);
+    } finally {
+      if (null != d) {
+        d.close();
+      }
+    }
   }
 
   /**
@@ -610,9 +642,10 @@ public final class Utilities {
 
     @Override
     public void run() {
+      BufferedReader br = null;
       try {
         InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+        br = new BufferedReader(isr);
         String line = null;
         if (type != null) {
           while ((line = br.readLine()) != null) {
@@ -623,8 +656,12 @@ public final class Utilities {
             os.println(line);
           }
         }
+        br.close();
+        br=null;
       } catch (IOException ioe) {
         ioe.printStackTrace();
+      }finally{
+        IOUtils.closeStream(br);
       }
     }
   }
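
(Closing the BufferedReader also closes the InputStreamReader it wraps, so
the single guard covers both readers; IOUtils.closeStream again handles
the error path.)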

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileInputFormatChecker.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileInputFormatChecker.java?rev=1084119&r1=1084118&r2=1084119&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileInputFormatChecker.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileInputFormatChecker.java Tue Mar 22 09:54:30 2011
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
 
 /**
@@ -39,12 +40,16 @@ public class SequenceFileInputFormatChec
       return false;
     }
     for (int fileId = 0; fileId < files.size(); fileId++) {
+      SequenceFile.Reader reader = null;
       try {
-        SequenceFile.Reader reader = new SequenceFile.Reader(fs, files.get(
+        reader = new SequenceFile.Reader(fs, files.get(
             fileId).getPath(), conf);
         reader.close();
+        reader = null;
       } catch (IOException e) {
         return false;
+      }finally{
+        IOUtils.closeStream(reader);
       }
     }
     return true;