Posted to commits@hive.apache.org by zs...@apache.org on 2008/12/13 03:57:23 UTC

svn commit: r726159 - in /hadoop/hive/trunk: CHANGES.txt ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java service/src/gen-php/ThriftHive.php

Author: zshao
Date: Fri Dec 12 18:57:20 2008
New Revision: 726159

URL: http://svn.apache.org/viewvc?rev=726159&view=rev
Log:
HIVE-129. Fix aux.jar packaging to work properly with 0.17 and 0.18
versions of hadoop. (Joydeep Sarma via zshao)

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
    hadoop/hive/trunk/service/src/gen-php/ThriftHive.php

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=726159&r1=726158&r2=726159&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri Dec 12 18:57:20 2008
@@ -36,6 +36,9 @@
 
   BUG FIXES
 
+    HIVE-129. Fix aux.jar packaging to work properly with 0.17 and 0.18 
+    versions of hadoop. (Joydeep Sarma via zshao)
+
     HIVE-162. Fix join0.q test failure with hadoop 0.17. (zshao)
 
     HIVE-146. Fix builds for non-default build directory.

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=726159&r1=726158&r2=726159&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Fri Dec 12 18:57:20 2008
@@ -23,6 +23,8 @@
 import java.net.URI;
 import java.net.URLEncoder;
 import java.net.URLDecoder;
+import java.net.URL;
+import java.net.URLClassLoader;
 
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.lang.StringUtils;
@@ -86,13 +88,19 @@
     String realFiles = getRealFiles(job);
     if (realFiles != null && realFiles.length() > 0) {
       job.set("tmpfiles", realFiles);
+
+      // workaround for hadoop-17 - jobclient only looks at commandlineconfig
+      Configuration commandConf = JobClient.getCommandLineConfig();
+      if(commandConf != null) {
+        commandConf.set("tmpfiles", realFiles);
+      }
     }
   }
 
   /**
    * Constructor/Initialization for invocation as independent utility
    */
-  public ExecDriver(mapredWork plan, JobConf job, boolean isSilent) {
+  public ExecDriver(mapredWork plan, JobConf job, boolean isSilent) throws HiveException {
     setWork(plan);
     this.job = job;
     LOG = LogFactory.getLog(this.getClass().getName());
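
The first hunk mirrors the "tmpfiles" setting onto the JobClient command-line Configuration, because the Hadoop 0.17 JobClient only picks up distributed-cache entries from that object. A standalone sketch of the same pattern follows; the helper class and method names are hypothetical, while JobClient.getCommandLineConfig is the 0.17/0.18-era call that appears in the diff:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;

    public class CommandLineConfMirror {
      // Hypothetical helper: set a property on the job conf and, when one is
      // present, on the JobClient command-line config that Hadoop 0.17 reads.
      static void setOnJobAndCommandLine(JobConf job, String key, String value) {
        job.set(key, value);
        Configuration commandConf = JobClient.getCommandLineConfig();
        if (commandConf != null) {   // guard against a missing command-line config, as in the diff
          commandConf.set(key, value);
        }
      }
    }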
@@ -220,6 +228,27 @@
   }
 
   /**
+   * Add new elements to the classpath
+   * @param newPaths Array of classpath elements
+   */
+  private static void addToClassPath(String [] newPaths) throws Exception {
+    Thread curThread = Thread.currentThread();
+    URLClassLoader loader = (URLClassLoader)curThread.getContextClassLoader();
+    List<URL> curPath = Arrays.asList(loader.getURLs());
+    ArrayList<URL> newPath = new ArrayList<URL> ();
+
+    for(String onestr: newPaths) {
+      URL oneurl = (new File(onestr)).toURL();
+      if(!curPath.contains(oneurl)) {
+        newPath.add(oneurl);
+      }
+    }
+    
+    loader = new URLClassLoader(newPath.toArray(new URL[0]), loader);
+    curThread.setContextClassLoader(loader);
+  }
+
+  /**
    * Execute a query plan using Hadoop
    */
   public int execute() {
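
addToClassPath works by wrapping the thread's context classloader in a new URLClassLoader, so any class resolved through the context classloader afterwards can also come from the added jars. A minimal, self-contained sketch of that technique (the jar path is a placeholder, not from the commit):

    import java.io.File;
    import java.net.URL;
    import java.net.URLClassLoader;

    public class ClassPathDemo {
      public static void main(String[] args) throws Exception {
        Thread curThread = Thread.currentThread();
        ClassLoader current = curThread.getContextClassLoader();

        // Placeholder jar path; pass a real jar as the first argument if desired.
        File jar = new File(args.length > 0 ? args[0] : "/tmp/aux-example.jar");
        URL[] extra = new URL[] { jar.toURI().toURL() };

        // Wrap the existing loader so everything already visible stays visible
        // and the extra jar is searched as well.
        URLClassLoader wrapped = new URLClassLoader(extra, current);
        curThread.setContextClassLoader(wrapped);

        // Code that resolves classes through the context classloader (for
        // example a SerDe or UDF looked up by name) now sees the added jar.
        System.out.println("context classloader: " + wrapped);
      }
    }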
@@ -401,6 +430,20 @@
       pathData = fs.open(new Path(planFileName));
     }
     
+    // workaround for hadoop-17 - libjars are not added to classpath. this affects local
+    // mode execution
+    boolean localMode = HiveConf.getVar(conf, HiveConf.ConfVars.HADOOPJT).equals("local");
+    if(localMode) {
+      String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
+      if (StringUtils.isNotBlank(auxJars)) {
+        try {
+          addToClassPath(StringUtils.split(auxJars, ","));
+        } catch (Exception e) {
+          throw new HiveException (e.getMessage(), e);
+        }
+      }
+    }
+
     mapredWork plan = Utilities.deserializeMapRedWork(pathData);
     ExecDriver ed = new ExecDriver(plan, conf, isSilent);
     int ret = ed.execute();

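The last hunk only applies in local mode: there the query runs inside the current JVM rather than in separate task JVMs, so jars registered for the distributed cache never reach a task classpath and have to be loaded by the process itself. A sketch of the same check without Hive classes, assuming the properties that HADOOPJT and HIVEAUXJARS resolve to (mapred.job.tracker and hive.aux.jars.path):

    import org.apache.commons.lang.StringUtils;
    import org.apache.hadoop.conf.Configuration;

    public class LocalModeAuxJars {
      // True when the job tracker is "local", i.e. the query runs in-process.
      static boolean isLocalMode(Configuration conf) {
        return "local".equals(conf.get("mapred.job.tracker"));
      }

      // Comma-separated list of auxiliary jars, split as in the diff.
      static String[] auxJarPaths(Configuration conf) {
        String auxJars = conf.get("hive.aux.jars.path");
        return StringUtils.isNotBlank(auxJars)
            ? StringUtils.split(auxJars, ",") : new String[0];
      }
    }
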
Modified: hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java?rev=726159&r1=726158&r2=726159&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java (original)
+++ hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java Fri Dec 12 18:57:20 2008
@@ -587,7 +587,7 @@
     public String toString() {
       StringBuilder sb = new StringBuilder("execute_result(");
       sb.append("ex:");
-      sb.append(this.ex.toString());
+      sb.append(this.ex);
       sb.append(")");
       return sb.toString();
     }
@@ -810,7 +810,7 @@
       sb.append("success:");
       sb.append(this.success);
       sb.append(",ex:");
-      sb.append(this.ex.toString());
+      sb.append(this.ex);
       sb.append(")");
       return sb.toString();
     }
@@ -1120,7 +1120,7 @@
       sb.append("success:");
       sb.append(this.success);
       sb.append(",ex:");
-      sb.append(this.ex.toString());
+      sb.append(this.ex);
       sb.append(")");
       return sb.toString();
     }
@@ -1376,7 +1376,7 @@
       sb.append("success:");
       sb.append(this.success);
       sb.append(",ex:");
-      sb.append(this.ex.toString());
+      sb.append(this.ex);
       sb.append(")");
       return sb.toString();
     }
@@ -1599,7 +1599,7 @@
       sb.append("success:");
       sb.append(this.success);
       sb.append(",ex:");
-      sb.append(this.ex.toString());
+      sb.append(this.ex);
       sb.append(")");
       return sb.toString();
     }

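The generated toString() methods are switched from this.ex.toString() to this.ex because StringBuilder.append(Object) turns a null reference into the literal string "null", while calling toString() on a null field throws a NullPointerException. A minimal illustration:

    public class AppendNullDemo {
      public static void main(String[] args) {
        Object ex = null;                    // e.g. no exception was recorded
        StringBuilder sb = new StringBuilder("execute_result(");
        sb.append("ex:");
        sb.append(ex);                       // append(Object) writes "null"
        sb.append(")");
        System.out.println(sb);              // prints execute_result(ex:null)
        // sb.append(ex.toString());         // would throw NullPointerException
      }
    }
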
Modified: hadoop/hive/trunk/service/src/gen-php/ThriftHive.php
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-php/ThriftHive.php?rev=726159&r1=726158&r2=726159&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-php/ThriftHive.php (original)
+++ hadoop/hive/trunk/service/src/gen-php/ThriftHive.php Fri Dec 12 18:57:20 2008
@@ -744,6 +744,9 @@
     $xfer = 0;
     $xfer += $output->writeStructBegin('ThriftHive_fetchN_result');
     if ($this->success !== null) {
+      if (!is_array($this->success)) {
+        throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+      }
       $xfer += $output->writeFieldBegin('success', TType::LST, 0);
       {
         $output->writeListBegin(TType::STRING, count($this->success));
@@ -911,6 +914,9 @@
     $xfer = 0;
     $xfer += $output->writeStructBegin('ThriftHive_fetchAll_result');
     if ($this->success !== null) {
+      if (!is_array($this->success)) {
+        throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+      }
       $xfer += $output->writeFieldBegin('success', TType::LST, 0);
       {
         $output->writeListBegin(TType::STRING, count($this->success));
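
The new is_array() guards make the generated PHP writers fail with a protocol error instead of serializing a corrupt list when the success field holds the wrong type. A rough Java analogue of the same guard, assuming the org.apache.thrift package names of current Thrift releases (the generated code in this commit predates that package naming):

    import java.util.List;
    import org.apache.thrift.protocol.TProtocolException;

    public class ResultGuard {
      // Refuse to serialize rather than write a corrupt list to the wire.
      static void checkListField(Object success) throws TProtocolException {
        if (success != null && !(success instanceof List)) {
          throw new TProtocolException(TProtocolException.INVALID_DATA,
              "Bad type in structure.");
        }
      }
    }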