Posted to commits@hive.apache.org by br...@apache.org on 2014/08/08 03:09:28 UTC

svn commit: r1616653 - in /hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark: KryoSerializer.java SparkClient.java

Author: brock
Date: Fri Aug  8 01:09:28 2014
New Revision: 1616653

URL: http://svn.apache.org/r1616653
Log:
HIVE-7560 - StarterProject: Fix exception handling in POC code (Chao Sun via Brock) [Spark Branch]

Modified:
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java
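
The change applies one pattern in both files: the auto-generated printStackTrace() calls are replaced with commons-logging calls that pass the caught exception, so stack traces go to the Hive logs rather than stderr. A minimal sketch of that pattern (class name, method, and message below are illustrative, not taken from either file):

import java.io.IOException;
import java.io.OutputStream;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class LoggingPatternSketch {
  private static final Log LOG = LogFactory.getLog(LoggingPatternSketch.class);

  void write(OutputStream out, byte[] data) {
    try {
      out.write(data);
    } catch (IOException e) {
      // Previously: e.printStackTrace(); now the message and the throwable go to the log.
      LOG.error("Error writing data", e);
    }
  }
}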

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java?rev=1616653&r1=1616652&r2=1616653&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java Fri Aug  8 01:09:28 2014
@@ -24,6 +24,8 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import org.apache.hadoop.mapred.JobConf;
@@ -33,6 +35,7 @@ import com.esotericsoftware.kryo.io.Inpu
 import com.esotericsoftware.kryo.io.Output;
 
 public class KryoSerializer {
+  private static final Log LOG = LogFactory.getLog("KryoSerializer");
   private static final Kryo kryo = Utilities.runtimeSerializationKryo.get();
 
   static {
@@ -58,15 +61,13 @@ public class KryoSerializer {
     try {
       jobConf.write(new DataOutputStream(out));
     } catch (IOException e) {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
+      LOG.error("Error serializing job configuration", e);
       return null;
     } finally {
       try {
         out.close();
       } catch (IOException e) {
-        // TODO Auto-generated catch block
-        e.printStackTrace();
+        LOG.error("Error closing output stream", e);
       }
     }
 
@@ -79,8 +80,7 @@ public class KryoSerializer {
     try {
       conf.readFields(new DataInputStream(new ByteArrayInputStream(buffer)));
     } catch (IOException e) {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
+      LOG.error("Error de-serializing job configuration");
       return null;
     }
     return conf;
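
The two methods patched above wrap JobConf's Writable round trip, which is why IOException is the only checked failure they handle: write(DataOutput) serializes the configuration and readFields(DataInput) restores it, and both declare only IOException. A self-contained sketch of that round trip (the property name is illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.mapred.JobConf;

public class JobConfRoundTrip {
  public static void main(String[] args) throws IOException {
    JobConf original = new JobConf();
    original.set("example.key", "example.value");  // illustrative property

    // JobConf is a Writable: write(DataOutput) serializes it to bytes ...
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    original.write(new DataOutputStream(out));

    // ... and readFields(DataInput) restores it. Both throw IOException,
    // which is what the patched catch blocks now log instead of printing.
    JobConf restored = new JobConf();
    restored.readFields(new DataInputStream(new ByteArrayInputStream(out.toByteArray())));

    System.out.println(restored.get("example.key"));
  }
}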

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java?rev=1616653&r1=1616652&r2=1616653&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java Fri Aug  8 01:09:28 2014
@@ -133,9 +133,7 @@ public class SparkClient implements Seri
       FileSystem fs = emptyScratchDir.getFileSystem(jobConf);
       fs.mkdirs(emptyScratchDir);
     } catch (IOException e) {
-      e.printStackTrace();
-      System.err.println("Error launching map-reduce job" + "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      LOG.error("Error launching map-reduce job", e);
       return 5;
     }
 
@@ -145,7 +143,7 @@ public class SparkClient implements Seri
     try {
       plan = gen.generate(sparkWork);
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Error generating Spark Plan", e);
       return 2;
     }
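
The SparkClient hunks keep the existing convention of reporting failure through numeric return codes (5 for the scratch-directory step, 2 for plan generation) and only change how the exception is surfaced. A stripped-down, hypothetical sketch of the scratch-directory step under that convention (class and method names are illustrative, not the actual SparkClient code):

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ScratchDirStep {
  private static final Log LOG = LogFactory.getLog(ScratchDirStep.class);

  // Returns 0 on success, 5 on failure, mirroring the return-code style above.
  static int ensureScratchDir(Path emptyScratchDir, Configuration conf) {
    try {
      FileSystem fs = emptyScratchDir.getFileSystem(conf);
      fs.mkdirs(emptyScratchDir);
      return 0;
    } catch (IOException e) {
      // Log the message together with the throwable rather than printing to stderr.
      LOG.error("Error creating scratch directory " + emptyScratchDir, e);
      return 5;
    }
  }
}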