Posted to commits@hive.apache.org by se...@apache.org on 2018/03/02 19:44:05 UTC

[1/2] hive git commit: HIVE-18828 : improve error handling for codecs in LLAP IO (Sergey Shelukhin, reviewed by Gopal Vijayaraghavan)

Repository: hive
Updated Branches:
  refs/heads/master 17441e485 -> a4198f584


HIVE-18828 : improve error handling for codecs in LLAP IO (Sergey Shelukhin, reviewed by Gopal Vijayaraghavan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5e3d85c4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5e3d85c4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5e3d85c4

Branch: refs/heads/master
Commit: 5e3d85c409b14afb6eb94bad01348d013a536503
Parents: 17441e4
Author: sergey <se...@apache.org>
Authored: Thu Mar 1 15:50:04 2018 -0800
Committer: sergey <se...@apache.org>
Committed: Fri Mar 2 11:24:34 2018 -0800

----------------------------------------------------------------------
 .../llap/io/encoded/OrcEncodedDataReader.java   | 19 +++++++++-----
 .../ql/io/orc/encoded/EncodedReaderImpl.java    | 26 +++++++++++++++-----
 2 files changed, 33 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/5e3d85c4/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index a6d2a04..9219d28 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -752,14 +752,21 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
     CompressionKind kind = orcReader.getCompressionKind();
     boolean isPool = useCodecPool;
     CompressionCodec codec = isPool ? OrcCodecPool.getCodec(kind) : WriterImpl.createCodec(kind);
+    boolean isCodecError = true;
     try {
-      return buildStripeFooter(Lists.<DiskRange>newArrayList(new BufferChunk(bb, 0)),
-          bb.remaining(), codec, orcReader.getCompressionSize());
+      OrcProto.StripeFooter result = buildStripeFooter(Lists.<DiskRange>newArrayList(
+          new BufferChunk(bb, 0)), bb.remaining(), codec, orcReader.getCompressionSize());
+      isCodecError = false;
+      return result;
     } finally {
-      if (isPool) {
-        OrcCodecPool.returnCodec(kind, codec);
-      } else {
-        codec.close();
+      try {
+        if (isPool && !isCodecError) {
+          OrcCodecPool.returnCodec(kind, codec);
+        } else {
+          codec.close();
+        }
+      } catch (Exception ex) {
+        LOG.error("Ignoring codec cleanup error", ex);
       }
     }
   }
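
The hunk above applies a standard rule for pooled resources: an object that failed mid-operation is never returned to the pool, because its internal state can no longer be trusted and would poison later borrowers. A minimal self-contained sketch of the pattern follows; the Codec and CodecPool types here are hypothetical stand-ins for illustration, not the actual ORC classes:

    import java.util.ArrayDeque;
    import java.util.Queue;
    import java.util.function.Supplier;

    public class PooledCodecSketch {
      // Hypothetical stand-in for a compression codec with mutable internal state.
      interface Codec extends AutoCloseable {
        byte[] decompress(byte[] input);
        @Override void close();
      }

      // Hypothetical stand-in for OrcCodecPool: a trivial single-threaded pool.
      static class CodecPool {
        private final Queue<Codec> idle = new ArrayDeque<>();
        Codec take(Supplier<Codec> factory) {
          Codec cached = idle.poll();
          return cached != null ? cached : factory.get();
        }
        void give(Codec codec) {
          idle.offer(codec);
        }
      }

      static byte[] decompressWithPool(CodecPool pool, Supplier<Codec> factory, byte[] input) {
        Codec codec = pool.take(factory);
        boolean isCodecError = true; // pessimistic: assume failure until proven otherwise
        try {
          byte[] result = codec.decompress(input);
          isCodecError = false; // only a normal return makes the codec safe to reuse
          return result;
        } finally {
          try {
            if (!isCodecError) {
              pool.give(codec); // clean run: recycle
            } else {
              codec.close();    // possibly corrupted: discard rather than pool
            }
          } catch (Exception ex) {
            // Cleanup failures are logged and swallowed so they cannot
            // mask the exception already propagating from decompress().
            System.err.println("Ignoring codec cleanup error: " + ex);
          }
        }
      }
    }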

http://git-wip-us.apache.org/repos/asf/hive/blob/5e3d85c4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
index 32bdf6e..893a2bb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
@@ -130,6 +130,7 @@ class EncodedReaderImpl implements EncodedReader {
   private boolean isDataReaderOpen = false;
   private final CompressionCodec codec;
   private final boolean isCodecFromPool;
+  private boolean isCodecFailure = false;
   private final boolean isCompressed;
   private final org.apache.orc.CompressionKind compressionKind;
   private final int bufferSize;
@@ -677,12 +678,17 @@ class EncodedReaderImpl implements EncodedReader {
 
   @Override
   public void close() throws IOException {
-    if (isCodecFromPool) {
-      OrcCodecPool.returnCodec(compressionKind, codec);
-    } else {
-      codec.close();
+    try {
+      if (isCodecFromPool && !isCodecFailure) {
+        OrcCodecPool.returnCodec(compressionKind, codec);
+      } else {
+        codec.close();
+      }
+    } catch (Exception ex) {
+      LOG.error("Ignoring error from codec", ex);
+    } finally {
+      dataReader.close();
     }
-    dataReader.close();
   }
 
   /**
@@ -870,7 +876,15 @@ class EncodedReaderImpl implements EncodedReader {
     for (ProcCacheChunk chunk : toDecompress) {
       ByteBuffer dest = chunk.getBuffer().getByteBufferRaw();
       if (chunk.isOriginalDataCompressed) {
-        decompressChunk(chunk.originalData, codec, dest);
+        boolean isOk = false;
+        try {
+          decompressChunk(chunk.originalData, codec, dest);
+          isOk = true;
+        } finally {
+          if (!isOk) {
+            isCodecFailure = true;
+          }
+        }
       } else {
         copyUncompressedChunk(chunk.originalData, dest);
       }
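
Two idioms are worth noting in these hunks. First, the decompress loop records a failure with a try/finally flag instead of a catch block, so the original exception propagates unchanged while the reader remembers that the codec is suspect. Second, close() now orders its cleanup so that a throwing codec can neither mask another error nor prevent the data reader from closing. A compact sketch of both idioms, using hypothetical placeholder types rather than the real reader classes:

    public class CodecFailureSketch {
      private boolean isCodecFailure = false; // set once a codec misbehaves

      // Try/finally flag idiom: no catch block, so the original exception
      // keeps propagating; we only take a note that the codec is now suspect.
      void decompressChunk(Runnable decompress) {
        boolean isOk = false;
        try {
          decompress.run();
          isOk = true; // reached only on a normal return
        } finally {
          if (!isOk) {
            isCodecFailure = true;
          }
        }
      }

      // Cleanup ordering: codec problems are logged and swallowed, and the
      // data reader is closed unconditionally in the finally block.
      void close(AutoCloseable codec, Runnable returnToPool, AutoCloseable dataReader)
          throws Exception {
        try {
          if (!isCodecFailure) {
            returnToPool.run(); // healthy codec goes back to the pool
          } else {
            codec.close();      // failed codec is destroyed, never pooled
          }
        } catch (Exception ex) {
          System.err.println("Ignoring error from codec: " + ex);
        } finally {
          dataReader.close();   // always runs, even if codec cleanup threw
        }
      }
    }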


[2/2] hive git commit: HIVE-18824 : ValidWriteIdList config should be defined on tables which have to collect stats after insert (Sergey Shelukhin, reviewed by Eugene Koifman and Sankar Hariappan)

Posted by se...@apache.org.
HIVE-18824 : ValidWriteIdList config should be defined on tables which have to collect stats after insert (Sergey Shelukhin, reviewed by Eugene Koifman and Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a4198f58
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a4198f58
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a4198f58

Branch: refs/heads/master
Commit: a4198f584aa0792a16d1e1eeb2ef3147403b8acb
Parents: 5e3d85c
Author: sergey <se...@apache.org>
Authored: Fri Mar 2 11:25:49 2018 -0800
Committer: sergey <se...@apache.org>
Committed: Fri Mar 2 11:25:49 2018 -0800

----------------------------------------------------------------------
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/a4198f58/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 94999fe..6999777 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -25,6 +25,7 @@ import java.io.PrintStream;
 import java.io.Serializable;
 import java.net.InetAddress;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -1244,15 +1245,18 @@ public class Driver implements IDriver {
 
  // Make the list of transactional tables which are read or written by the current txn
   private List<String> getTransactionalTableList(QueryPlan plan) {
-    List<String> tableList = new ArrayList<>();
+    Set<String> tableList = new HashSet<>();
 
     for (ReadEntity input : plan.getInputs()) {
       addTableFromEntity(input, tableList);
     }
-    return tableList;
+    for (WriteEntity output : plan.getOutputs()) {
+      addTableFromEntity(output, tableList);
+    }
+    return new ArrayList<String>(tableList);
   }
 
-  private void addTableFromEntity(Entity entity, List<String> tableList) {
+  private void addTableFromEntity(Entity entity, Collection<String> tableList) {
     Table tbl;
     switch (entity.getType()) {
       case TABLE: {
@@ -1268,10 +1272,9 @@ public class Driver implements IDriver {
         return;
       }
     }
+    if (!AcidUtils.isTransactionalTable(tbl)) return;
     String fullTableName = AcidUtils.getFullTableName(tbl.getDbName(), tbl.getTableName());
-    if (AcidUtils.isTransactionalTable(tbl) && !tableList.contains(fullTableName)) {
-      tableList.add(fullTableName);
-    }
+    tableList.add(fullTableName);
   }
 
   private String getUserFromUGI() {
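
The Driver change replaces a List plus contains() scan with a HashSet, so a table that is both read and written in the same transaction is recorded once and each insertion is O(1) rather than a linear search; it also starts collecting the plan's outputs, not just its inputs. A standalone sketch of the same collect-and-deduplicate shape; the Entity class and its transactional flag are simplified placeholders, not Hive's metadata classes:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collection;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class TransactionalTableListSketch {
      // Simplified placeholder for Hive's read/write entities.
      static class Entity {
        final String dbName;
        final String tableName;
        final boolean isTransactional;
        Entity(String dbName, String tableName, boolean isTransactional) {
          this.dbName = dbName;
          this.tableName = tableName;
          this.isTransactional = isTransactional;
        }
      }

      // Collect every transactional table the query touches, inputs and outputs alike.
      static List<String> getTransactionalTableList(List<Entity> inputs, List<Entity> outputs) {
        Set<String> tables = new HashSet<>(); // dedups tables that are both read and written
        for (Entity input : inputs) {
          addTableFromEntity(input, tables);
        }
        for (Entity output : outputs) {
          addTableFromEntity(output, tables);
        }
        return new ArrayList<>(tables);
      }

      // Accepting Collection rather than List mirrors the widened parameter in the patch.
      static void addTableFromEntity(Entity entity, Collection<String> tables) {
        if (!entity.isTransactional) {
          return; // only transactional tables need write-id tracking here
        }
        tables.add(entity.dbName + "." + entity.tableName);
      }

      public static void main(String[] args) {
        List<Entity> inputs = Arrays.asList(new Entity("db", "t1", true));
        List<Entity> outputs = Arrays.asList(
            new Entity("db", "t1", true),   // duplicate of an input: collected once
            new Entity("db", "t2", false)); // non-transactional: skipped
        System.out.println(getTransactionalTableList(inputs, outputs)); // prints [db.t1]
      }
    }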