Posted to commits@hive.apache.org by dm...@apache.org on 2020/03/25 20:48:30 UTC

[hive] branch master updated: HIVE-23064: Remove Calls to printStackTrace in Module hive-exec (David Mollitor reviewed by Peter Vary)

This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 66302c4  HIVE-23064: Remove Calls to printStackTrace in Module hive-exec (David Mollitor reviewed by Peter Vary)
66302c4 is described below

commit 66302c4f7e62236471f5060064d28d317051b9bd
Author: David Mollitor <dm...@apache.org>
AuthorDate: Wed Mar 25 16:46:19 2020 -0400

    HIVE-23064: Remove Calls to printStackTrace in Module hive-exec (David Mollitor reviewed by Peter Vary)
---
 ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java         | 12 +++++++-----
 .../hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java       |  3 ---
 .../org/apache/hadoop/hive/ql/exec/FileSinkOperator.java     |  2 --
 .../apache/hadoop/hive/ql/exec/HashTableDummyOperator.java   |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java     |  6 +++++-
 .../java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java  |  3 +--
 ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java    |  6 ++++--
 .../java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java   |  2 --
 .../hadoop/hive/ql/exec/tez/CustomPartitionVertex.java       |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java |  9 ++-------
 .../org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java      |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java         |  2 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java   |  6 ++----
 .../hive/ql/optimizer/physical/LocalMapJoinProcFactory.java  |  2 +-
 .../ql/optimizer/physical/SortMergeJoinTaskDispatcher.java   |  6 ++----
 16 files changed, 25 insertions(+), 38 deletions(-)
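
The change is mechanical across all sixteen files: each e.printStackTrace() call is either dropped (where the exception is already rethrown or logged) or replaced by an SLF4J statement that passes the exception as the final argument, so the full stack trace is routed to the configured log instead of stderr. A minimal standalone sketch of the before-and-after pattern, using a hypothetical Example class (not any of the Hive classes touched here) and assuming only slf4j-api on the classpath:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Example {
  private static final Logger LOG = LoggerFactory.getLogger(Example.class);

  public String render() {
    try {
      return riskyOperation();
    } catch (Exception e) {
      // Old pattern removed by this commit: e.printStackTrace() writes to
      // stderr, bypassing log configuration and log routing entirely.
      // New pattern: a descriptive message plus the exception as the last
      // argument, which makes SLF4J record the full stack trace.
      LOG.warn("Unable to produce rendered output", e);
      return e.toString();
    }
  }

  private String riskyOperation() throws Exception {
    throw new Exception("simulated failure");
  }
}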

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
index 0d7b92d..4328665 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
@@ -60,6 +60,8 @@ import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TJSONProtocol;
 import org.apache.thrift.transport.TMemoryBuffer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -71,6 +73,8 @@ import com.google.common.annotations.VisibleForTesting;
 public class QueryPlan implements Serializable {
   private static final long serialVersionUID = 1L;
 
+  private static final Logger LOG = LoggerFactory.getLogger(QueryPlan.class);
+
   private String cboInfo;
   private String queryString;
   private String optimizedCBOPlan;
@@ -643,7 +647,7 @@ public class QueryPlan implements Serializable {
     try {
       return getJSONQuery(getQueryPlan());
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.warn("Unable to produce query plan JSON string", e);
       return e.toString();
     }
   }
@@ -655,8 +659,7 @@ public class QueryPlan implements Serializable {
     try {
       q.write(oprot);
     } catch (TException e) {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
+      LOG.warn("Unable to produce query plan Thrift string", e);
       return q.toString();
     }
     return tmb.toString("UTF-8");
@@ -669,8 +672,7 @@ public class QueryPlan implements Serializable {
     try {
       q.write(oprot);
     } catch (TException e) {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
+      LOG.warn("Unable to produce query plan binary string", e);
       return q.toString();
     }
     byte[] buf = new byte[tmb.length()];
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
index 06f60ab..9d4bf79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
@@ -113,7 +113,6 @@ public final class DDLSemanticAnalyzerFactory {
       BaseSemanticAnalyzer analyzer = analyzerClass.getConstructor(QueryState.class).newInstance(queryState);
       return analyzer;
     } catch (Exception e) {
-      e.printStackTrace();
       throw new RuntimeException(e);
     }
   }
@@ -126,7 +125,6 @@ public final class DDLSemanticAnalyzerFactory {
           analyzerClass.getConstructor(QueryState.class, Hive.class).newInstance(queryState, db);
       return analyzer;
     } catch (Exception e) {
-      e.printStackTrace();
       throw new RuntimeException(e);
     }
   }
@@ -148,7 +146,6 @@ public final class DDLSemanticAnalyzerFactory {
           return TYPE_TO_ANALYZER.get(actualType);
         }
       } catch (Exception e) {
-        e.printStackTrace();
         throw new RuntimeException(e);
       }
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 1bb52b0..04166a2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -678,7 +678,6 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
     } catch (HiveException e) {
       throw e;
     } catch (Exception e) {
-      e.printStackTrace();
       throw new HiveException(e);
     }
   }
@@ -797,7 +796,6 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
         autoDelete = fs.deleteOnExit(fsp.outPaths[0]);
       }
     } catch (Exception e) {
-      e.printStackTrace();
       throw new HiveException(e);
     }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java
index e8f7dd0..0b5fa97 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java
@@ -50,7 +50,6 @@ public class HashTableDummyOperator extends Operator<HashTableDummyDesc> impleme
       this.outputObjInspector = serde.getObjectInspector();
     } catch (Exception e) {
       LOG.error("Generating output obj inspector from dummy object error", e);
-      e.printStackTrace();
     }
   }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
index 1aae142..3e1100c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
@@ -46,9 +46,13 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hive.common.util.ReflectionUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class JoinUtil {
 
+  private static final Logger LOG = LoggerFactory.getLogger(JoinUtil.class);
+
   /**
    * Represents the join result between two tables
    */
@@ -304,7 +308,7 @@ public class JoinUtil {
     try {
       SerDeUtils.initializeSerDe(sd, null, desc.getProperties(), null);
     } catch (SerDeException e) {
-      e.printStackTrace();
+      LOG.warn("Error getting spill table", e);
       return null;
     }
     return sd;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
index 3210ca5..343c6b3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
@@ -499,8 +499,7 @@ public class ScriptOperator extends Operator<ScriptDesc> implements
           new_abort = true;
         }
       } catch (IOException e) {
-        LOG.error("Got ioexception: " + e.getMessage());
-        e.printStackTrace();
+        LOG.error("Got exception", e);
         new_abort = true;
       } catch (InterruptedException e) {
       }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 60aac5a..f51c0fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -234,6 +234,8 @@ public final class Utilities {
    */
   public static final Logger FILE_OP_LOGGER = LoggerFactory.getLogger("FileOperations");
 
+  public static final Logger LOGGER = LoggerFactory.getLogger(Utilities.class);
+
   /**
    * The object in the reducer are composed of these top level fields.
    */
@@ -846,7 +848,7 @@ public final class Utilities {
         }
       }
     } catch (FileNotFoundException e) {
-      e.printStackTrace();
+      LOG.warn("Could not compare files. One or both cannot be found", e);
     }
     return false;
   }
@@ -1935,7 +1937,7 @@ public final class Utilities {
     try {
       return bucketName.split(COPY_KEYWORD)[0];
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.warn("Invalid bucket file name", e);
       return bucketName;
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 46764d4..8a8822d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -248,7 +248,6 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
       FileSystem fs = emptyScratchDir.getFileSystem(job);
       fs.mkdirs(emptyScratchDir);
     } catch (IOException e) {
-      e.printStackTrace();
       console.printError("Error launching map-reduce job", "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return 5;
@@ -428,7 +427,6 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
       returnVal = jobExecHelper.progress(rj, jc, ctx);
       success = (returnVal == 0);
     } catch (Exception e) {
-      e.printStackTrace();
       setException(e);
       String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
       if (rj != null) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
index dfabfb8..24b801e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
@@ -194,7 +194,6 @@ public class CustomPartitionVertex extends VertexManagerPlugin {
           MRInputUserPayloadProto.newBuilder(protoPayload).setGroupingEnabled(true).build();
       inputDescriptor.setUserPayload(UserPayload.create(updatedPayload.toByteString().asReadOnlyByteBuffer()));
     } catch (IOException e) {
-      e.printStackTrace();
       throw new RuntimeException(e);
     }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
index 19f6886..9b2dae3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
@@ -949,14 +949,9 @@ public class DagUtils {
    * Helper method to create a yarn local resource.
    */
   private LocalResource createLocalResource(FileSystem remoteFs, Path file,
-      LocalResourceType type, LocalResourceVisibility visibility) {
+      LocalResourceType type, LocalResourceVisibility visibility) throws IOException {
 
-    FileStatus fstat = null;
-    try {
-      fstat = remoteFs.getFileStatus(file);
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
+    final FileStatus fstat = remoteFs.getFileStatus(file);
 
     URL resourceURL = ConverterUtils.getYarnUrlFromPath(file);
     long resourceSize = fstat.getLen();
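
Unlike the purely mechanical replacements elsewhere in this commit, the DagUtils hunk changes behavior: before, a failed getFileStatus() was printed and swallowed, leaving fstat null so that the fstat.getLen() call a few lines later would fail with a NullPointerException; after, the IOException propagates to the caller via the new throws clause. A reduction of the new shape, using a hypothetical class and assuming hadoop-common on the classpath:

import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

class LocalResourceSketch {
  // Declares IOException instead of catching it, so a missing or
  // unreadable file surfaces at the call site rather than as a later NPE.
  static long resourceSize(FileSystem fs, Path file) throws IOException {
    FileStatus fstat = fs.getFileStatus(file); // may throw IOException
    return fstat.getLen();
  }
}
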
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
index 616f2d6..4e74be0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
@@ -71,7 +71,6 @@ public class HiveHistoryViewer implements Listener {
       HiveHistoryUtil.parseHiveHistory(historyFile, this);
     } catch (IOException e) {
       // TODO pass on this exception
-      e.printStackTrace();
       LOG.error("Error parsing hive history log file", e);
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index d5a31df..17e6cdf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -1474,7 +1474,6 @@ public class AcidUtils {
       }
       return dirToSnapshots;
     } catch (IOException e) {
-      e.printStackTrace();
       throw new IOException(e);
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
index 3e45e45..6eba2e5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
@@ -1834,7 +1834,7 @@ public class RCFile {
         try {
           ret = nextKeyBuffer();
         } catch (EOFException eof) {
-          eof.printStackTrace();
+          LOG.warn("Failed to get next key buffer", eof);
         }
       }
       return (ret > 0) && next(readRows);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
index b9bcda9..4b339a6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
@@ -623,10 +623,8 @@ public class OrcRecordUpdater implements RecordUpdater {
       try {
         AcidUtils.OrcAcidVersion.writeVersionFile(path.getParent(), fs);
       } catch (Exception e) {
-        e.printStackTrace();
-        // Ignore; might have been created by another concurrent writer, writing to a different bucket
-        // within this delta/base directory
-        LOG.trace(e.fillInStackTrace().toString());
+        LOG.trace("Ignore; might have been created by another concurrent writer, writing to a"
+            + " different bucket within this delta/base directory", e);
       }
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
index 29112f8..ca840d7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
@@ -117,7 +117,7 @@ public final class LocalMapJoinProcFactory {
       try {
         hasGroupBy(mapJoinOp, context);
       } catch (Exception e) {
-        e.printStackTrace();
+        LOG.warn("Failed to determine if has group-by", e);
       }
 
       MapJoinDesc mapJoinDesc = mapJoinOp.getConf();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
index 575e9c2..54373ef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
@@ -153,8 +153,7 @@ public class SortMergeJoinTaskDispatcher extends AbstractJoinTaskDispatcher impl
       genSMBJoinWork(currJoinWork.getMapWork(), newSMBJoinOp);
       return currJoinWork;
     } catch (Exception e) {
-      e.printStackTrace();
-      throw new SemanticException("Generate Map Join Task Error: " + e.getMessage());
+      throw new SemanticException("Generate Map Join Task Error", e);
     }
   }
 
@@ -314,8 +313,7 @@ public class SortMergeJoinTaskDispatcher extends AbstractJoinTaskDispatcher impl
         taskToAliases.put(newTask, aliases);
       }
     } catch (Exception e) {
-      e.printStackTrace();
-      throw new SemanticException("Generate Map Join Task Error: ", e);
+      throw new SemanticException("Generate Map Join Task Error", e);
     }
 
     // insert current common join task to conditional task