Posted to commits@hive.apache.org by js...@apache.org on 2009/06/30 15:47:12 UTC

svn commit: r789725 - in /hadoop/hive/trunk: common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/io/ ql/src/java/org/apache/hadoop...

Author: jssarma
Date: Tue Jun 30 13:47:12 2009
New Revision: 789725

URL: http://svn.apache.org/viewvc?rev=789725&view=rev
Log:
HIVE-574: fixes to classloader

Modified:
    hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
    hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java

Modified: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java (original)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java Tue Jun 30 13:47:12 2009
@@ -24,9 +24,12 @@
 public class JavaUtils {
 
   /**
-   * Standard way of getting classloader across all of Hive.
+   * Standard way of getting classloader in Hive code (outside of Hadoop).
+   * 
    * Uses the context loader to get access to classpaths to auxiliary and jars
    * added with 'add jar' command. Falls back to current classloader.
+   * 
+   * In Hadoop-related code, we use Configuration.getClassLoader().
    */
   public static ClassLoader getClassLoader() {
     ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

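The hunk above shows only the first line of the method body; the javadoc
describes a context-loader-first lookup with a fallback. A minimal sketch of
that pattern (the fallback branch is inferred from the javadoc, not shown in
this hunk):

    public static ClassLoader getClassLoader() {
      ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
      if (classLoader == null) {
        // Fall back to the loader that loaded this class.
        classLoader = JavaUtils.class.getClassLoader();
      }
      return classLoader;
    }
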
Modified: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Jun 30 13:47:12 2009
@@ -104,6 +104,10 @@
     // hive jar
     HIVEJAR("hive.jar.path", ""), 
     HIVEAUXJARS("hive.aux.jars.path", ""),
+    
+    // hive added files and jars
+    HIVEADDEDFILES("hive.added.files.path", ""),
+    HIVEADDEDJARS("hive.added.jars.path", ""),
    
     // for hive script operator
     HIVETABLENAME("hive.table.name", ""),

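Both new variables are plain string-valued ConfVars; later hunks in this
commit write and read them through HiveConf's static accessors. A minimal
usage sketch (the jar path is hypothetical):

    // Record jars added with 'add jar' on the job configuration...
    HiveConf.setVar(job, ConfVars.HIVEADDEDJARS, "/tmp/my-udfs.jar");
    // ...and read them back wherever the classpath is assembled.
    String addedJars = HiveConf.getVar(job, ConfVars.HIVEADDEDJARS);
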
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Tue Jun 30 13:47:12 2009
@@ -40,6 +40,7 @@
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.plan.mapredWork;
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.partitionDesc;
@@ -105,12 +106,21 @@
   public void initialize(HiveConf conf) {
     super.initialize(conf);
     job = new JobConf(conf, ExecDriver.class);
-    initializeFiles(
-        "tmpfiles",
-        getResourceFiles(job, SessionState.ResourceType.FILE));
-    initializeFiles(
-        "tmpjars",
-        getResourceFiles(job, SessionState.ResourceType.JAR));
+    // NOTE: initialize is only called in non-local mode.
+    // In non-local mode, we need to move the SessionState files
+    // and jars into the jobConf.
+    // In local mode, MapRedTask sets up the jobConf instead.
+    //
+    // "tmpfiles" and "tmpjars" are set by ExecDriver.execute(),
+    // which is called in both local and non-local mode.
+    String addedFiles = getResourceFiles(job, SessionState.ResourceType.FILE);
+    if (StringUtils.isNotBlank(addedFiles)) {
+      HiveConf.setVar(job, ConfVars.HIVEADDEDFILES, addedFiles);
+    }
+    String addedJars = getResourceFiles(job, SessionState.ResourceType.JAR);
+    if (StringUtils.isNotBlank(addedJars)) {
+      HiveConf.setVar(job, ConfVars.HIVEADDEDJARS, addedJars);
+    }
   }
 
   /**
@@ -342,12 +352,24 @@
     job.setOutputKeyClass(Text.class);
     job.setOutputValueClass(Text.class);
 
+    // Transfer HIVEAUXJARS and HIVEADDEDJARS to "tmpjars" so Hadoop understands them
     String auxJars = HiveConf.getVar(job, HiveConf.ConfVars.HIVEAUXJARS);
-    if (StringUtils.isNotBlank(auxJars)) {
-      LOG.info("adding libjars: " + auxJars);
-      job.set("tmpjars", auxJars);
+    String addedJars = HiveConf.getVar(job, HiveConf.ConfVars.HIVEADDEDJARS);
+    if (StringUtils.isNotBlank(auxJars) || StringUtils.isNotBlank(addedJars)) {
+      String allJars = 
+        StringUtils.isNotBlank(auxJars)
+        ? (StringUtils.isNotBlank(addedJars) ? addedJars + "," + auxJars : auxJars)
+        : addedJars;
+      LOG.info("adding libjars: " + allJars);
+      initializeFiles("tmpjars", allJars);
     }
 
+    // Transfer HIVEADDEDFILES to "tmpfiles" so Hadoop understands it
+    String addedFiles = HiveConf.getVar(job, HiveConf.ConfVars.HIVEADDEDFILES);
+    if (StringUtils.isNotBlank(addedFiles)) {
+      initializeFiles("tmpfiles", addedFiles);
+    }
+    
     int returnVal = 0;
     RunningJob rj = null, orig_rj = null;
     boolean success = false;
@@ -530,16 +552,26 @@
         .equals("local");
     if (localMode) {
       String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
-      if (StringUtils.isNotBlank(auxJars)) {
-        try {
-          Utilities.addToClassPath(StringUtils.split(auxJars, ","));
-        } catch (Exception e) {
-          throw new HiveException(e.getMessage(), e);
+      String addedJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEADDEDJARS);
+      try {
+        ClassLoader loader = conf.getClassLoader();
+        if (StringUtils.isNotBlank(auxJars)) {
+          loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
+        }
+        if (StringUtils.isNotBlank(addedJars)) {
+          loader = Utilities.addToClassPath(loader, StringUtils.split(addedJars, ","));
         }
+        conf.setClassLoader(loader);
+        // Also set this as the thread context classloader, so new threads
+        // inherit it and it propagates into Configurations created by
+        // those threads.
+        Thread.currentThread().setContextClassLoader(loader);
+      } catch (Exception e) {
+        throw new HiveException(e.getMessage(), e);
       }
     }
 
-    mapredWork plan = Utilities.deserializeMapRedWork(pathData);
+    mapredWork plan = Utilities.deserializeMapRedWork(pathData, conf);
     ExecDriver ed = new ExecDriver(plan, conf, isSilent);
     int ret = ed.execute();
     if (ret != 0) {
@@ -621,7 +653,8 @@
         !work.getAliasToWork().isEmpty()) {
       String oneAlias = (String)work.getAliasToWork().keySet().toArray()[0];
       
-      Class<? extends HiveOutputFormat> outFileFormat = (Class<? extends HiveOutputFormat>)Class.forName("org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat");
+      Class<? extends HiveOutputFormat> outFileFormat = (Class<? extends HiveOutputFormat>)
+          job.getClassByName("org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat");
       
       String newFile = hiveScratchDir + File.separator + (++numEmptyPaths);
       Path newPath = new Path(newFile);

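The local-mode branch above relies on a Hadoop detail: Configuration captures
the thread context classloader when it is constructed. Installing the
augmented loader on the thread therefore makes it visible to any Configuration
created later on that thread, or on threads it spawns. A sketch, with a
hypothetical class name:

    Thread.currentThread().setContextClassLoader(loader);
    Configuration newConf = new Configuration();  // captures 'loader' internally
    // Classes from jars added at runtime are now resolvable:
    Class<?> udf = newConf.getClassByName("com.example.MyUDF");  // hypothetical
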
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java Tue Jun 30 13:47:12 2009
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.*;
+import java.net.URLClassLoader;
 import java.util.*;
 
 import org.apache.hadoop.io.*;
@@ -56,6 +57,14 @@
   
   public void configure(JobConf job) {
     try {
+      l4j.info("conf classpath = " 
+          + Arrays.asList(((URLClassLoader)job.getClassLoader()).getURLs()));
+      l4j.info("thread classpath = " 
+          + Arrays.asList(((URLClassLoader)Thread.currentThread().getContextClassLoader()).getURLs()));
+    } catch (Exception e) {
+      l4j.info("cannot get classpath: " + e.getMessage());
+    }
+    try {
       init();
       jc = job;
       mapredWork mrwork = Utilities.getMapRedWork(job);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java Tue Jun 30 13:47:12 2009
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.*;
+import java.net.URLClassLoader;
 import java.util.*;
 
 import org.apache.hadoop.mapred.*;
@@ -69,6 +70,14 @@
   }
 
   public void configure(JobConf job) {
+    try {
+      l4j.info("conf classpath = " 
+          + Arrays.asList(((URLClassLoader)job.getClassLoader()).getURLs()));
+      l4j.info("thread classpath = " 
+          + Arrays.asList(((URLClassLoader)Thread.currentThread().getContextClassLoader()).getURLs()));
+    } catch (Exception e) {
+      l4j.info("cannot get classpath: " + e.getMessage());
+    }
     jc = job;
     mapredWork gWork = Utilities.getMapRedWork(job);
     reducer = gWork.getReducer();

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Tue Jun 30 13:47:12 2009
@@ -20,6 +20,7 @@
 
 import java.util.*;
 import java.io.*;
+import java.net.URLClassLoader;
 
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -153,7 +154,7 @@
       if ((className == "") || (className == null)) {
         throw new HiveException("SerDe class or the SerDe class name is not set for table: " + td.getProperties().getProperty("name"));
       }
-      sdclass = MapOperator.class.getClassLoader().loadClass(className);
+      sdclass = hconf.getClassByName(className);
     }
     
     deserializer = (Deserializer) sdclass.newInstance();

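The switch from Class.forName to Configuration.getClassByName is the central
pattern of this commit: Class.forName resolves through the calling class's own
loader, which cannot see jars added at runtime, while getClassByName resolves
through the loader installed on the Configuration (the augmented one set up in
ExecDriver). A side-by-side sketch, with a hypothetical SerDe name:

    String className = "com.example.MySerDe";         // hypothetical
    Class<?> ok  = hconf.getClassByName(className);   // sees jars from 'add jar'
    Class<?> bad = Class.forName(className);          // caller's loader only; may fail
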
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java Tue Jun 30 13:47:12 2009
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.ql.plan.mapredWork;
 import org.apache.hadoop.hive.ql.exec.Utilities.*;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.apache.commons.lang.StringUtils;
@@ -48,27 +49,31 @@
       String hadoopExec = conf.getVar(HiveConf.ConfVars.HADOOPBIN);
       String hiveJar = conf.getJar();
 
-      String addedJars = ExecDriver.getResourceFiles(conf, SessionState.ResourceType.JAR);
-      if (!StringUtils.isEmpty(addedJars)) {
-        // Add addedJars to auxJars
+      String libJarsOption;
+      {
+        String addedJars = ExecDriver.getResourceFiles(conf, SessionState.ResourceType.JAR);
+        conf.setVar(ConfVars.HIVEADDEDJARS, addedJars);
+
         String auxJars = conf.getAuxJars();
-        if (StringUtils.isEmpty(auxJars)) {
-          auxJars = addedJars;
+        // Put auxjars and addedjars together into libjars
+        if (StringUtils.isEmpty(addedJars)) {
+          if (StringUtils.isEmpty(auxJars)) {
+            libJarsOption = " ";
+          } else {
+            libJarsOption = " -libjars " + auxJars + " ";
+          }
         } else {
-          auxJars = auxJars + "," + addedJars;
+          if (StringUtils.isEmpty(auxJars)) {
+            libJarsOption = " -libjars " + addedJars + " ";
+          } else {
+            libJarsOption = " -libjars " + addedJars + "," + auxJars + " ";
+          }   
         }
-        conf.setAuxJars(auxJars);
-      }
-      // Generate the hiveCOnfArgs after potentially adding the jars
-      String hiveConfArgs = ExecDriver.generateCmdLine(conf);
-
-      String auxJars = conf.getAuxJars();
-      if (StringUtils.isEmpty(auxJars)) {
-        auxJars = " ";
-      } else {
-        auxJars = " -libjars " + auxJars + " ";
       }
 
+      // Generate the hiveConfArgs after potentially adding the jars
+      String hiveConfArgs = ExecDriver.generateCmdLine(conf);
+      
       mapredWork plan = getWork();
 
       File planFile = File.createTempFile("plan", ".xml");
@@ -78,7 +83,7 @@
 
       String isSilent = "true".equalsIgnoreCase(System.getProperty("test.silent"))
                         ? "-silent" : "";
-      String cmdLine = hadoopExec + " jar " + auxJars + " " + hiveJar 
+      String cmdLine = hadoopExec + " jar " + libJarsOption + " " + hiveJar 
           + " org.apache.hadoop.hive.ql.exec.ExecDriver -plan "
           + planFile.toString() + " " + isSilent + " " + hiveConfArgs; 
       

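The nested if/else above reduces to: added jars first, then aux jars,
comma-separated, wrapped in a -libjars flag only when the combined list is
non-empty. An equivalent, more compact sketch (not the committed code),
followed by a hypothetical resulting command line:

    String allJars = StringUtils.isEmpty(addedJars) ? auxJars
        : StringUtils.isEmpty(auxJars) ? addedJars
        : addedJars + "," + auxJars;
    String libJarsOption =
        StringUtils.isEmpty(allJars) ? " " : " -libjars " + allJars + " ";
    // e.g. (hypothetical paths):
    //   hadoop jar -libjars /tmp/udf.jar,/opt/hive/aux.jar hive_exec.jar \
    //     org.apache.hadoop.hive.ql.exec.ExecDriver -plan /tmp/plan1234.xml ...
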
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Tue Jun 30 13:47:12 2009
@@ -99,7 +99,7 @@
           if(gWork != null)
             return (gWork);
           InputStream in = new FileInputStream("HIVE_PLAN");
-          mapredWork ret = deserializeMapRedWork(in);
+          mapredWork ret = deserializeMapRedWork(in, job);
           gWork = ret;
         }
         gWork.initialize();
@@ -187,8 +187,8 @@
     e.close();
   }
 
-  public static mapredWork deserializeMapRedWork (InputStream in) {
-    XMLDecoder d = new XMLDecoder(in);
+  public static mapredWork deserializeMapRedWork (InputStream in, Configuration conf) {
+    XMLDecoder d = new XMLDecoder(in, null, null, conf.getClassLoader());
     mapredWork ret = (mapredWork)d.readObject();
     d.close();
     return (ret);
@@ -723,9 +723,8 @@
    * @param newPaths
    *          Array of classpath elements
    */
-  public static void addToClassPath(String[] newPaths) throws Exception {
-    Thread curThread = Thread.currentThread();
-    URLClassLoader loader = (URLClassLoader) curThread.getContextClassLoader();
+  public static ClassLoader addToClassPath(ClassLoader cloader, String[] newPaths) throws Exception {
+    URLClassLoader loader = (URLClassLoader)cloader;
     List<URL> curPath = Arrays.asList(loader.getURLs());
     ArrayList<URL> newPath = new ArrayList<URL>();
 
@@ -746,8 +745,7 @@
       }
     }
 
-    loader = new URLClassLoader(curPath.toArray(new URL[0]), loader);
-    curThread.setContextClassLoader(loader);
+    return new URLClassLoader(curPath.toArray(new URL[0]), loader);
   }
 
   /**

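addToClassPath no longer installs the new loader as a side effect; it returns
the new URLClassLoader and lets the caller decide where to put it, as the
ExecDriver and SessionState hunks now do. A caller sketch (the jar path is
hypothetical):

    ClassLoader loader = conf.getClassLoader();
    loader = Utilities.addToClassPath(loader, new String[] { "/tmp/my-udfs.jar" });
    conf.setClassLoader(loader);
    Thread.currentThread().setContextClassLoader(loader);
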
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java Tue Jun 30 13:47:12 2009
@@ -22,12 +22,16 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.lang.reflect.Method;
+import java.net.URLClassLoader;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.mapredWork;
@@ -61,7 +65,7 @@
    * HiveInputSplit encapsulates an InputSplit with its corresponding inputFormatClass.
   * The reason that it derives from FileSplit is to make sure "map.input.file" is set in MapTask.
    */
-  public static class HiveInputSplit extends FileSplit implements InputSplit {
+  public static class HiveInputSplit extends FileSplit implements InputSplit, Configurable {
 
     InputSplit inputSplit;
     String     inputFormatClassName;
@@ -121,7 +125,8 @@
     public void readFields(DataInput in) throws IOException {
       String inputSplitClassName = in.readUTF();
       try {
-        inputSplit = (InputSplit) ReflectionUtils.newInstance(Class.forName(inputSplitClassName), job);
+        inputSplit = (InputSplit) ReflectionUtils.newInstance(
+            conf.getClassByName(inputSplitClassName), conf);
       } catch (Exception e) {
         throw new IOException("Cannot create an instance of InputSplit class = "
             + inputSplitClassName + ":" + e.getMessage());
@@ -135,19 +140,31 @@
       inputSplit.write(out);
       out.writeUTF(inputFormatClassName);
     }
+
+    Configuration conf;
+    
+    @Override
+    public Configuration getConf() {
+      return conf;
+    }
+
+    @Override
+    public void setConf(Configuration conf) {
+      this.conf = conf;
+    }
   }
 
-  static JobConf job;
+  JobConf job;
 
   public void configure(JobConf job) {
-    HiveInputFormat.job = job;
+    this.job = job;
   }
 
   /**
    * A cache of InputFormat instances.
    */
   private static Map<Class,InputFormat<WritableComparable, Writable>> inputFormats;
-  static InputFormat<WritableComparable, Writable> getInputFormatFromCache(Class inputFormatClass) throws IOException {
+  static InputFormat<WritableComparable, Writable> getInputFormatFromCache(Class inputFormatClass, JobConf job) throws IOException {
     if (inputFormats == null) {
       inputFormats = new HashMap<Class, InputFormat<WritableComparable, Writable>>();
     }
@@ -174,12 +191,12 @@
     Class inputFormatClass = null;
     try {
       inputFormatClassName = hsplit.inputFormatClassName();
-      inputFormatClass = Class.forName(inputFormatClassName);
+      inputFormatClass = job.getClassByName(inputFormatClassName);
     } catch (Exception e) {
       throw new IOException("cannot find class " + inputFormatClassName);
     }
 
-    InputFormat inputFormat = getInputFormatFromCache(inputFormatClass);
+    InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
 
     return new HiveRecordReader(inputFormat.getRecordReader(inputSplit, job, reporter));
   }
@@ -208,7 +225,7 @@
       tableDesc table = getTableDescFromPath(dir);
       // create a new InputFormat instance if this is the first time to see this class
       Class inputFormatClass = table.getInputFileFormatClass();
-      InputFormat inputFormat = getInputFormatFromCache(inputFormatClass);
+      InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
 
       FileInputFormat.setInputPaths(newjob, dir);
       newjob.setInputFormat(inputFormat.getClass());
@@ -234,7 +251,7 @@
     for (Path dir: dirs) {
       tableDesc table = getTableDescFromPath(dir);
       // create a new InputFormat instance if this is the first time to see this class
-      InputFormat inputFormat = getInputFormatFromCache(table.getInputFileFormatClass());
+      InputFormat inputFormat = getInputFormatFromCache(table.getInputFileFormatClass(), job);
 
       FileInputFormat.setInputPaths(newjob, dir);
       newjob.setInputFormat(inputFormat.getClass());

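Implementing Configurable is what makes the readFields change work: Hadoop's
ReflectionUtils.newInstance(clazz, conf) calls setConf(conf) on any
Configurable instance it creates, so by the time the framework deserializes a
split, this.conf is populated and getClassByName can resolve classes from
added jars. A sketch:

    HiveInputSplit split = (HiveInputSplit)
        ReflectionUtils.newInstance(HiveInputSplit.class, job);  // calls split.setConf(job)
    split.readFields(in);  // conf.getClassByName(...) now sees the augmented classpath
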
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java Tue Jun 30 13:47:12 2009
@@ -981,8 +981,8 @@
         throw new VersionMismatchException(VERSION[3], version);
 
       try {
-        Class<?> keyCls = Class.forName(Text.readString(in));
-        Class<?> valCls = Class.forName(Text.readString(in));
+        Class<?> keyCls = conf.getClassByName(Text.readString(in));
+        Class<?> valCls = conf.getClassByName(Text.readString(in));
         if (!keyCls.equals(KeyBuffer.class)
             || !valCls.equals(ValueBuffer.class))
           throw new IOException(file + " not a RCFile");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Tue Jun 30 13:47:12 2009
@@ -314,7 +314,9 @@
   public static boolean registerJar(String newJar) {
     LogHelper console = getConsole();
     try {
-      Utilities.addToClassPath(StringUtils.split(newJar, ","));
+      ClassLoader loader = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(
+          Utilities.addToClassPath(loader, StringUtils.split(newJar, ",")));
       console.printInfo("Added " + newJar + " to class path");
       return true;
     } catch (Exception e) {

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java?rev=789725&r1=789724&r2=789725&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java Tue Jun 30 13:47:12 2009
@@ -39,7 +39,7 @@
       // per Table basis
 
       String className = tbl.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_CLASS);
-      Class<?> recordClass = Class.forName(className);
+      Class<?> recordClass = job.getClassByName(className);
 
       String protoName = tbl.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT);
       if (protoName == null) {