Posted to commits@hive.apache.org by pr...@apache.org on 2016/08/29 18:37:14 UTC

hive git commit: HIVE-14659: OutputStream won't close if caught exception in function unparseExprForValuesClause in SemanticAnalyzer.java (Fan Yunbo reviewed by Sergey Shelukhin)

Repository: hive
Updated Branches:
  refs/heads/master cf9538b9b -> 67bf8eb78


HIVE-14659: OutputStream won't close if caught exception in function unparseExprForValuesClause in SemanticAnalyzer.java (Fan Yunbo reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/67bf8eb7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/67bf8eb7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/67bf8eb7

Branch: refs/heads/master
Commit: 67bf8eb78f9843cc5238b0b7dc90e8393cf5d664
Parents: cf9538b
Author: Prasanth Jayachandran <pr...@apache.org>
Authored: Mon Aug 29 11:33:09 2016 -0700
Committer: Prasanth Jayachandran <pr...@apache.org>
Committed: Mon Aug 29 11:37:05 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/67bf8eb7/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 66589fe..699bb11 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -864,6 +864,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     // this file.
     Path tablePath = null;
     FileSystem fs = null;
+    FSDataOutputStream out = null;
     try {
       if(dataDir == null) {
         tablePath = Warehouse.getDnsPath(new Path(ss.getTempTableSpace(), tableName), conf);
@@ -876,7 +877,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       fs = tablePath.getFileSystem(conf);
       fs.mkdirs(tablePath);
       Path dataFile = new Path(tablePath, "data_file");
-      FSDataOutputStream out = fs.create(dataFile);
+      out = fs.create(dataFile);
       List<FieldSchema> fields = new ArrayList<FieldSchema>();
 
       boolean firstRow = true;
@@ -900,7 +901,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         writeAsText("\n", out);
         firstRow = false;
       }
-      out.close();
 
       // Step 2, create a temp table, using the created file as the data
       StorageFormat format = new StorageFormat(conf);
@@ -924,6 +924,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         } catch (IOException swallowIt) {}
       }
       throw new SemanticException(errMsg, e);
+    } finally {
+        IOUtils.closeStream(out);
     }
 
     // Step 3, return a new subtree with a from clause built around that temp table
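----------------------------------------------------------------------

For reference, the change applies the usual declare-outside-try / close-in-finally idiom: the FSDataOutputStream is declared before the try block, the explicit out.close() inside the try is dropped, and org.apache.hadoop.io.IOUtils.closeStream (null-safe, swallows close errors) runs in a finally block so the stream is released even when unparseExprForValuesClause throws. The sketch below illustrates that pattern in isolation; it is not the Hive code itself, and names such as StreamCloseSketch and writeDataFile are made up for the example.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class StreamCloseSketch {
  // Hypothetical helper showing the same close-in-finally pattern as the patch.
  static void writeDataFile(Configuration conf, Path dataFile, byte[] rows)
      throws IOException {
    FileSystem fs = dataFile.getFileSystem(conf);
    FSDataOutputStream out = null;   // declared outside try so finally can reach it
    try {
      out = fs.create(dataFile);
      out.write(rows);               // if this throws, the pre-patch code leaked 'out'
    } finally {
      IOUtils.closeStream(out);      // null-safe close; ignores exceptions from close()
    }
  }
}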