Posted to commits@hive.apache.org by js...@apache.org on 2009/06/21 15:12:13 UTC

svn commit: r787007 - in /hadoop/hive/trunk: ./ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/ hwi/src/java/org/apache/hadoop/hive/hwi/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/q...

Author: jssarma
Date: Sun Jun 21 13:12:12 2009
New Revision: 787007

URL: http://svn.apache.org/viewvc?rev=787007&view=rev
Log:
HIVE-338 - add/delete jar commands, Hive Server: CLI commands and fixes for multi-thread safety

Added:
    hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
      - copied, changed from r787005, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/CommandProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
      - copied, changed from r787005, hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/SetProcessor.java
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/deletejar.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out
Removed:
    hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/SetProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/CommandProcessor.java
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/build-common.xml
    hadoop/hive/trunk/build.xml
    hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
    hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Sun Jun 21 13:12:12 2009
@@ -9,6 +9,10 @@
 
   NEW FEATURES
 
+    HIVE-338. add/delete jar commands, Hive Server fix for multithreading,
+    cli commands via hiveserver
+    (Min Zhou and Joydeep Sen Sarma via jssarma)
+
     HIVE-354. UDF for length of a string. (Neil Conway via namit).
 
     HIVE-402. Implement UDF regexp. (Raghotham Murthy via namit)

Modified: hadoop/hive/trunk/build-common.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/build-common.xml?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/build-common.xml (original)
+++ hadoop/hive/trunk/build-common.xml Sun Jun 21 13:12:12 2009
@@ -65,7 +65,6 @@
     <pathelement location="" />
     <pathelement location="${test.data.dir}/conf"/>
     <pathelement location="${hive.conf.dir}"/>
-    <fileset dir="${test.src.data.dir}" includes="files/*.jar"/>
     <fileset dir="${hive.root}" includes="testlibs/*.jar"/>
     <path refid="classpath"/>
   </path>
@@ -117,7 +116,6 @@
     <pathelement location="${build.dir.hive}/metastore/classes"/>
     <pathelement location="${build.dir.hive}/ql/classes"/>
     <pathelement location="${build.dir.hive}/cli/classes"/>
-    <fileset dir="${test.src.data.dir}" includes="files/*.jar"/>
     <fileset dir="${basedir}" includes="lib/*.jar"/>
     <path refid="common-classpath"/>
   </path>
@@ -174,6 +172,7 @@
      encoding="${build.encoding}"
      srcdir="${test.src.dir}"
      includes="org/apache/hadoop/**/*.java"
+     excludes="**/TestSerDe.java"
      destdir="${test.build.classes}"
      debug="${javac.debug}"
      optimize="${javac.optimize}"
@@ -274,7 +273,7 @@
       <sysproperty key="test.service.standalone.server" value="${standalone}"/>
       <sysproperty key="log4j.configuration" value="file://${test.data.dir}/conf/hive-log4j.properties"/>
       <sysproperty key="derby.stream.error.file" value="${test.build.dir}/derby.log"/>
-      <sysproperty key="hive.aux.jars.path" value="file://${test.build.dir}/test-udfs.jar,file://${test.src.data.dir}/files/TestSerDe.jar"/>
+      <sysproperty key="hive.aux.jars.path" value="file://${test.build.dir}/test-udfs.jar"/>
       <sysproperty key="ql.test.query.clientpositive.dir" value="${ql.test.query.clientpositive.dir}"/>
       <sysproperty key="ql.test.results.clientpositive.dir" value="${ql.test.results.clientpositive.dir}"/>
       <sysproperty key="test.log.dir" value="${test.log.dir}"/>

Modified: hadoop/hive/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/build.xml?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/build.xml (original)
+++ hadoop/hive/trunk/build.xml Sun Jun 21 13:12:12 2009
@@ -185,7 +185,7 @@
       <fileset dir="${hive.root}/service/src/gen-py/hive_service" excludes="**/.svn"/>
     </copy>
     <copy todir="${target.lib.dir}" preservelastmodified="true" flatten="true">
-      <fileset dir="${hive.root}" includes="*/*.jar, */*/*.jar" excludes="**/antlr-2*,**/antlr-3*"/>
+      <fileset dir="${hive.root}" includes="*/*.jar, */*/*.jar" excludes="**/antlr-2*,**/antlr-3*,**/TestSerDe.jar"/>
       <fileset file="${build.dir.hive}/cli/hive_cli.jar"/>
       <fileset file="${build.dir.hive}/common/hive_common.jar"/>
       <fileset file="${build.dir.hive}/ql/hive_exec.jar"/>

Modified: hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Sun Jun 21 13:12:12 2009
@@ -30,28 +30,25 @@
 import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
 public class CliDriver {
 
   public final static String prompt = "hive";
   public final static String prompt2 = "    "; // when ';' is not yet seen
 
-  private SetProcessor sp;
-  private Driver qp;
-  private FsShell dfs;
   private LogHelper console;
   private Configuration conf;
 
   public CliDriver() {
     SessionState ss = SessionState.get();
-    sp = new SetProcessor();
-    qp = new Driver();
     conf = (ss != null) ? ss.getConf() : new Configuration ();
-    dfs = new FsShell(conf);
     Log LOG = LogFactory.getLog("CliDriver");
     console = new LogHelper(LOG);
   }
@@ -61,14 +58,10 @@
     
     String cmd_trimmed = cmd.trim();
     String[] tokens = cmd_trimmed.split("\\s+");
-    String cmd_1 = cmd_trimmed.substring(tokens[0].length());
+    String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
     int ret = 0;
     
-    if(tokens[0].toLowerCase().equals("set")) {
-
-      ret = sp.run(cmd_1);
-
-    } else if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
+    if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
 
       // if we have come this far - either the previous commands
       // are all successful or this is command line. in either case
@@ -99,26 +92,6 @@
         ret = 1;
       }
 
-    } else if (tokens[0].toLowerCase().equals("dfs")) {
-
-      String [] alt_tokens = new String [tokens.length-1];
-      System.arraycopy(tokens, 1, alt_tokens, 0, tokens.length-1);
-      tokens = alt_tokens;
-
-      try {
-        PrintStream oldOut = System.out;
-        System.setOut(ss.out);
-        ret = dfs.run(tokens);
-        System.setOut(oldOut);
-        if (ret != 0) {
-          console.printError("Command failed with exit code = " + ret);
-        }
-      } catch (Exception e) {
-        console.printError("Exception raised from DFSShell.run " + e.getLocalizedMessage(),
-                           org.apache.hadoop.util.StringUtils.stringifyException(e));
-        ret = 1;
-      }
-
     } else if (tokens[0].toLowerCase().equals("list")) {
 
       SessionState.ResourceType t;
@@ -138,67 +111,43 @@
           ss.out.println(StringUtils.join(s, "\n"));
       }
 
-    } else if (tokens[0].toLowerCase().equals("add")) {
-
-      SessionState.ResourceType t;
-      if(tokens.length < 3 || (t = SessionState.find_resource_type(tokens[1])) == null) {
-        console.printError("Usage: add [" +
-                           StringUtils.join(SessionState.ResourceType.values(),"|") +
-                           "] <value> [<value>]*");
-        ret = 1;
-      } else {
-        for(int i = 2; i<tokens.length; i++) {
-          ss.add_resource(t, tokens[i]);
-        }
-      }
-
-    } else if (tokens[0].toLowerCase().equals("delete")) {
-
-      SessionState.ResourceType t;
-      if(tokens.length < 2 || (t = SessionState.find_resource_type(tokens[1])) == null) {
-        console.printError("Usage: delete [" +
-                           StringUtils.join(SessionState.ResourceType.values(),"|") +
-                           "] [<value>]");
-        ret = 1;
-      } else if (tokens.length >= 3) {
-        for(int i = 2; i<tokens.length; i++) {
-          ss.delete_resource(t, tokens[i]);
-        }
-      } else {
-        ss.delete_resource(t);
-      }
-
-    } else if (!StringUtils.isBlank(cmd_trimmed)) {
-      PrintStream out = ss.out;
-
-      long start = System.currentTimeMillis();
-
-      ret = qp.run(cmd);
-      if (ret != 0) {
-        qp.close();
-        return ret;
-      }
+    } else {
+      CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
+      if(proc != null) {
+        if(proc instanceof Driver) {
+          Driver qp = (Driver) proc;
+          PrintStream out = ss.out;
+          long start = System.currentTimeMillis();
+
+          ret = qp.run(cmd);
+          if (ret != 0) {
+            qp.close();
+            return ret;
+          }
         
-      Vector<String> res = new Vector<String>();
-      while (qp.getResults(res)) {
-      	for (String r:res) {
-          out.println(r);
-      	}
-        res.clear();
-        if (out.checkError()) {
-          break;
-        }
-      }
+          Vector<String> res = new Vector<String>();
+          while (qp.getResults(res)) {
+            for (String r:res) {
+              out.println(r);
+            }
+            res.clear();
+            if (out.checkError()) {
+              break;
+            }
+          }
       
-      int cret = qp.close();
-      if (ret == 0) {
-        ret = cret;
-      }
+          int cret = qp.close();
+          ret = cret;
 
-      long end = System.currentTimeMillis();
-      if (end > start) {
-        double timeTaken = (double)(end-start)/1000.0;
-        console.printInfo("Time taken: " + timeTaken + " seconds", null);
+          long end = System.currentTimeMillis();
+          if (end > start) {
+            double timeTaken = (double)(end-start)/1000.0;
+            console.printInfo("Time taken: " + timeTaken + " seconds", null);
+          }
+
+        } else {
+          ret = proc.run(cmd_1);
+        }
       }
     }
 

Added: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java?rev=787007&view=auto
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java (added)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java Sun Jun 21 13:12:12 2009
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+/**
+ * Collection of Java class loading/reflection related utilities common across Hive
+ */
+public class JavaUtils {
+
+  /**
+   * Standard way of getting classloader across all of Hive.
+   * Uses the context loader to get access to classpaths to auxiliary and jars
+   * added with 'add jar' command. Falls back to current classloader.
+   */
+  public static ClassLoader getClassLoader() {
+    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+    if (classLoader == null) {
+      classLoader = JavaUtils.class.getClassLoader();
+    }
+    return classLoader;
+  }
+
+}
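
A typical call site for the new loader, as a minimal sketch (the class name
below is a placeholder, not something added by this commit):

    import org.apache.hadoop.hive.common.JavaUtils;

    public class LoaderSketch {
      public static void main(String[] args) throws Exception {
        // Resolve a user-supplied class through the session classloader so
        // that jars registered via 'add jar' are visible to the lookup.
        Class<?> cls = Class.forName("com.example.MyUdf", true,
                                     JavaUtils.getClassLoader());
        System.out.println("Loaded " + cls.getName());
      }
    }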

Modified: hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java (original)
+++ hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java Sun Jun 21 13:12:12 2009
@@ -9,7 +9,7 @@
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.cli.OptionsProcessor;
-import org.apache.hadoop.hive.cli.SetProcessor;
+import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.exec.ExecDriver;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Sun Jun 21 13:12:12 2009
@@ -42,9 +42,11 @@
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.PreExecute;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.plan.tableDesc;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -52,13 +54,15 @@
 import org.apache.commons.logging.LogFactory;
 
 public class Driver implements CommandProcessor {
-  static final private Log LOG = LogFactory.getLog("hive.ql.Driver");
+
+  static final private Log LOG = LogFactory.getLog(Driver.class.getName());
+  static final private LogHelper console = new LogHelper(LOG);
+
   private int maxRows = 100;
   ByteStream.Output bos = new ByteStream.Output();
 
   private HiveConf conf;
   private DataInput resStream;
-  private LogHelper console;
   private Context ctx;
   private QueryPlan plan;
 
@@ -138,12 +142,10 @@
    * for backwards compatibility with current tests
    */
   public Driver(HiveConf conf) {
-    console = new LogHelper(LOG);
     this.conf = conf;
   }
 
   public Driver() {
-    console = new LogHelper(LOG);
     if (SessionState.get() != null) {
       conf = SessionState.get().getConf();
     }
@@ -224,7 +226,7 @@
     
     for(String peClass: peClasses) {
       try {
-        pehooks.add((PreExecute)Class.forName(peClass.trim()).newInstance());
+        pehooks.add((PreExecute)Class.forName(peClass.trim(), true, JavaUtils.getClassLoader()).newInstance());
       } catch (ClassNotFoundException e) {
         console.printError("Pre Exec Hook Class not found:" + e.getMessage());
         throw e;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Sun Jun 21 13:12:12 2009
@@ -65,6 +65,9 @@
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -629,6 +632,23 @@
     return 0;
   }
 
+  
+  /**
+   * Check if the given serde is valid
+   */
+  private void validateSerDe(String serdeName) throws HiveException {
+    try {
+      Deserializer d = SerDeUtils.lookupDeserializer(serdeName);
+      if(d != null) {
+        System.out.println("Found class for " + serdeName);
+      }
+    } catch (SerDeException e) {
+      throw new HiveException ("Cannot validate serde: " + serdeName, e);
+    }
+  }
+
+
+
   /**
    * Create a new table.
    * 
@@ -686,6 +706,9 @@
     if (crtTbl.getSerName() == null) {
       LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName() );
       tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+    } else {
+      // let's validate that the serde exists
+      validateSerDe(crtTbl.getSerName());
     }
 
     if (crtTbl.getComment() != null)

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Sun Jun 21 13:12:12 2009
@@ -67,11 +67,10 @@
     super();
   }
 
-  public static String getRealFiles(Configuration conf) {
+  public static String getResourceFiles(Configuration conf, SessionState.ResourceType t) {
     // fill in local files to be added to the task environment
     SessionState ss = SessionState.get();
-    Set<String> files = (ss == null) ? null : ss.list_resource(
-        SessionState.ResourceType.FILE, null);
+    Set<String> files = (ss == null) ? null : ss.list_resource(t, null);
     if (files != null) {
       ArrayList<String> realFiles = new ArrayList<String>(files.size());
       for (String one : files) {
@@ -88,25 +87,33 @@
     }
   }
 
-  /**
-   * Initialization when invoked from QL
-   */
-  public void initialize(HiveConf conf) {
-    super.initialize(conf);
-    job = new JobConf(conf, ExecDriver.class);
-    String realFiles = getRealFiles(job);
-    if (realFiles != null && realFiles.length() > 0) {
-      job.set("tmpfiles", realFiles);
+  private void initializeFiles(String prop, String files) {
+    if (files != null && files.length() > 0) {
+      job.set(prop, files);
 
       // workaround for hadoop-17 - jobclient only looks at commandlineconfig
       Configuration commandConf = JobClient.getCommandLineConfig();
       if (commandConf != null) {
-        commandConf.set("tmpfiles", realFiles);
+        commandConf.set(prop, files);
       }
     }
   }
 
   /**
+   * Initialization when invoked from QL
+   */
+  public void initialize(HiveConf conf) {
+    super.initialize(conf);
+    job = new JobConf(conf, ExecDriver.class);
+    initializeFiles(
+        "tmpfiles",
+        getResourceFiles(job, SessionState.ResourceType.FILE));
+    initializeFiles(
+        "tmpjars",
+        getResourceFiles(job, SessionState.ResourceType.JAR));
+  }
+
+  /**
    * Constructor/Initialization for invocation as independent utility
    */
   public ExecDriver(mapredWork plan, JobConf job, boolean isSilent)
@@ -273,36 +280,6 @@
     }
   }
 
-
-  /**
-   * Add new elements to the classpath
-   * 
-   * @param newPaths
-   *          Array of classpath elements
-   */
-  private static void addToClassPath(String[] newPaths, boolean local) throws Exception {
-    Thread curThread = Thread.currentThread();
-    URLClassLoader loader = (URLClassLoader) curThread.getContextClassLoader();
-    List<URL> curPath = Arrays.asList(loader.getURLs());
-    ArrayList<URL> newPath = new ArrayList<URL>();
-
-    for (String onestr : newPaths) {
-      // special processing for hadoop-17. file:// needs to be removed
-      if (local) {
-        if (StringUtils.indexOf(onestr, "file://") == 0)
-          onestr = StringUtils.substring(onestr, 7);
-      }
-
-      URL oneurl = (new File(onestr)).toURL();
-      if (!curPath.contains(oneurl)) {
-        newPath.add(oneurl);
-      }
-    }
-
-    loader = new URLClassLoader(newPath.toArray(new URL[0]), loader);
-    curThread.setContextClassLoader(loader);
-  }
-
   /**
    * Calculate the total size of input files.
    * @param job the hadoop job conf.
@@ -555,7 +532,7 @@
       String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
       if (StringUtils.isNotBlank(auxJars)) {
         try {
-          addToClassPath(StringUtils.split(auxJars, ","), true);
+          Utilities.addToClassPath(StringUtils.split(auxJars, ","));
         } catch (Exception e) {
           throw new HiveException(e.getMessage(), e);
         }
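
To see how session resources reach the job, a rough sketch of the flow under
the new methods (paths are placeholders; assumes a started session):

    // 'add file' / 'add jar' register resources against the session.
    SessionState ss = SessionState.get();
    ss.add_resource(SessionState.ResourceType.FILE, "/tmp/lookup.txt");
    ss.add_resource(SessionState.ResourceType.JAR, "/tmp/my_udfs.jar");

    // initialize() then collects them per type and plumbs them into the
    // JobConf as "tmpfiles" and "tmpjars", so the JobClient ships them
    // to every task.
    JobConf job = new JobConf();
    String files = ExecDriver.getResourceFiles(job, SessionState.ResourceType.FILE);
    String jars  = ExecDriver.getResourceFiles(job, SessionState.ResourceType.JAR);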

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java Sun Jun 21 13:12:12 2009
@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import java.net.URLClassLoader;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.createFunctionDesc;
 import org.apache.hadoop.util.StringUtils;
@@ -29,7 +32,7 @@
 public class FunctionTask extends Task<FunctionWork> {
   private static final long serialVersionUID = 1L;
   private static final Log LOG = LogFactory.getLog("hive.ql.exec.FunctionTask");
-  
+
   transient HiveConf conf;
   
   public void initialize(HiveConf conf) {
@@ -57,7 +60,6 @@
   @SuppressWarnings("unchecked")
   private Class<? extends UDF> getUdfClass(createFunctionDesc desc)
       throws ClassNotFoundException {
-    return (Class<? extends UDF>) conf.getClassByName(desc.getClassName());
+    return (Class<? extends UDF>) Class.forName(desc.getClassName(), true, JavaUtils.getClassLoader());
   }
-
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java Sun Jun 21 13:12:12 2009
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.ql.plan.mapredWork;
 import org.apache.hadoop.hive.ql.exec.Utilities.*;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.apache.commons.lang.StringUtils;
 
@@ -48,10 +49,21 @@
       String hiveJar = conf.getJar();
       String hiveConfArgs = ExecDriver.generateCmdLine(conf);
       String auxJars = conf.getAuxJars();
-      if (!StringUtils.isEmpty(auxJars)) {
-        auxJars = " -libjars " + auxJars + " ";
-      } else {
+      String addedJars = ExecDriver.getResourceFiles(conf, SessionState.ResourceType.JAR);
+
+      if (StringUtils.isEmpty(auxJars) && StringUtils.isEmpty(addedJars)) {
         auxJars = " ";
+      } else {
+        String jarList;
+        if(StringUtils.isEmpty(auxJars)) {
+          jarList = addedJars;
+        } else if (StringUtils.isEmpty(addedJars)) {
+          jarList = auxJars;
+        } else {
+          jarList = auxJars + "," + addedJars;
+        }
+
+        auxJars = " -libjars " + jarList + " ";
       }
 
       mapredWork plan = getWork();
@@ -67,7 +79,7 @@
           + " org.apache.hadoop.hive.ql.exec.ExecDriver -plan "
           + planFile.toString() + " " + isSilent + " " + hiveConfArgs; 
       
-      String files = ExecDriver.getRealFiles(conf);
+      String files = ExecDriver.getResourceFiles(conf, SessionState.ResourceType.FILE);
       if(!files.isEmpty()) {
         cmdLine = cmdLine + " -files " + files;
       }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Sun Jun 21 13:12:12 2009
@@ -21,6 +21,8 @@
 
 import java.io.*;
 import java.net.URI;
+import java.net.URL;
+import java.net.URLClassLoader;
 import java.util.*;
 import java.beans.*;
 
@@ -634,4 +636,61 @@
     return e.getClass().getName() + "(" +  e.getMessage() + ")";
   }
 
+  /**
+   * Add new elements to the classpath
+   * 
+   * @param newPaths
+   *          Array of classpath elements
+   */
+  public static void addToClassPath(String[] newPaths) throws Exception {
+    Thread curThread = Thread.currentThread();
+    URLClassLoader loader = (URLClassLoader) curThread.getContextClassLoader();
+    List<URL> curPath = Arrays.asList(loader.getURLs());
+    ArrayList<URL> newPath = new ArrayList<URL>();
+
+    // get a list with the current classpath components
+    for(URL onePath: curPath) {
+      newPath.add(onePath);
+    }
+    curPath = newPath;
+
+    for (String onestr : newPaths) {
+      // special processing for hadoop-17. file:// needs to be removed
+      if (StringUtils.indexOf(onestr, "file://") == 0)
+        onestr = StringUtils.substring(onestr, 7);
+
+      URL oneurl = (new File(onestr)).toURL();
+      if (!curPath.contains(oneurl)) {
+        curPath.add(oneurl);
+      }
+    }
+
+    loader = new URLClassLoader(curPath.toArray(new URL[0]), loader);
+    curThread.setContextClassLoader(loader);
+  }
+
+  /**
+   * remove elements from the classpath
+   * 
+   * @param pathsToRemove
+   *          Array of classpath elements
+   */
+  public static void removeFromClassPath(String[] pathsToRemove) throws Exception {
+    Thread curThread = Thread.currentThread();
+    URLClassLoader loader = (URLClassLoader) curThread.getContextClassLoader();
+    Set<URL> newPath = new HashSet<URL>(Arrays.asList(loader.getURLs()));
+
+    for (String onestr : pathsToRemove) {
+      // special processing for hadoop-17. file:// needs to be removed
+      if (StringUtils.indexOf(onestr, "file://") == 0)
+        onestr = StringUtils.substring(onestr, 7);
+
+      URL oneurl = (new File(onestr)).toURL();
+      newPath.remove(oneurl);
+    }
+
+    loader = new URLClassLoader(newPath.toArray(new URL[0]));
+    curThread.setContextClassLoader(loader);
+  }
+
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Sun Jun 21 13:12:12 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.metastore.HiveMetaException;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
@@ -359,10 +360,12 @@
       table.setTTable(tTable);
       table.setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>)
           Class.forName(table.getSchema().getProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_INPUT_FORMAT,
-              org.apache.hadoop.mapred.SequenceFileInputFormat.class.getName())));
+                                                      org.apache.hadoop.mapred.SequenceFileInputFormat.class.getName()),
+                        true, JavaUtils.getClassLoader()));
       table.setOutputFormatClass((Class<? extends HiveOutputFormat>)
           Class.forName(table.getSchema().getProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_OUTPUT_FORMAT,
-              HiveSequenceFileOutputFormat.class.getName()))); 
+                                                      HiveSequenceFileOutputFormat.class.getName()),
+                        true, JavaUtils.getClassLoader()));
       table.setDeserializer(MetaStoreUtils.getDeserializer(getConf(), p));
       table.setDataLocation(new URI(tTable.getSd().getLocation()));
     } catch(Exception e) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Sun Jun 21 13:12:12 2009
@@ -48,6 +48,7 @@
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.InputFormat;
@@ -470,7 +471,8 @@
 
   public void setInputFormatClass(String name) throws HiveException {
     try {
-      setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>)Class.forName(name));
+      setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>)
+                          Class.forName(name, true, JavaUtils.getClassLoader()));
     } catch (ClassNotFoundException e) {
       throw new HiveException("Class not found: " + name, e);
     }
@@ -478,7 +480,7 @@
 
   public void setOutputFormatClass(String name) throws HiveException {
     try {
-      Class<?> origin = Class.forName(name);
+      Class<?> origin = Class.forName(name, true, JavaUtils.getClassLoader());
       setOutputFormatClass(HiveFileFormatUtils.getOutputFormatSubstitute(origin));
     } catch (ClassNotFoundException e) {
       throw new HiveException("Class not found: " + name, e);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Sun Jun 21 13:12:12 2009
@@ -33,6 +33,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
@@ -296,7 +297,7 @@
     }
     
     try {
-      Class<?> origin = Class.forName(crtTblDesc.getOutputFormat());
+      Class<?> origin = Class.forName(crtTblDesc.getOutputFormat(), true, JavaUtils.getClassLoader());
       Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils.getOutputFormatSubstitute(origin);
       if(replaced == null)
         throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg());

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java?rev=787007&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java Sun Jun 21 13:12:12 2009
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.processors;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+
+public class AddResourceProcessor implements CommandProcessor {
+
+  public static final Log LOG = LogFactory.getLog(AddResourceProcessor.class.getName());
+  public static final LogHelper console = new LogHelper(LOG);
+
+  public int run(String command) {
+    SessionState ss = SessionState.get();
+    String[] tokens = command.split("\\s+");
+    SessionState.ResourceType t;
+    if (tokens.length < 2 || (t = SessionState.find_resource_type(tokens[0])) == null) {
+      console.printError("Usage: add [" +
+                         StringUtils.join(SessionState.ResourceType.values(),"|") +
+                         "] <value> [<value>]*");
+      return 1;
+    }
+    for (int i = 1; i < tokens.length; i++) {
+      ss.add_resource(t, tokens[i]);
+    }
+    return 0;
+  }
+
+}

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java (from r787005, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/CommandProcessor.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/CommandProcessor.java&r1=787005&r2=787007&rev=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/CommandProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java Sun Jun 21 13:12:12 2009
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql;
+package org.apache.hadoop.hive.ql.processors;
 
 public interface CommandProcessor {
   public int run(String command);

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=787007&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Sun Jun 21 13:12:12 2009
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.processors;
+
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.commons.lang.StringUtils;
+
+public class CommandProcessorFactory {
+  
+  public static CommandProcessor get(String cmd) {
+    String cmdl = cmd.toLowerCase();
+
+    if(cmdl.equals("set")) {
+      return new SetProcessor();
+    } else if (cmdl.equals("dfs")) {
+      SessionState ss = SessionState.get();
+      return new DfsProcessor(ss.getConf());
+    } else if (cmdl.equals("add")) {
+      return new AddResourceProcessor();
+    } else if (cmdl.equals("delete")) {
+      return new DeleteResourceProcessor();
+    } else if (!StringUtils.isBlank(cmd)) {
+      return new Driver();
+    }
+    return null;
+  }
+
+}
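
The dispatch pattern this factory enables, sketched after the CliDriver
changes above (the command string is a placeholder):

    String cmd = "add jar /tmp/my_udfs.jar";
    String[] tokens = cmd.trim().split("\\s+");
    CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
    if (proc instanceof Driver) {
      // full HiveQL statements go to the query Driver as-is
      proc.run(cmd);
    } else if (proc != null) {
      // other processors get the remainder of the command line
      proc.run(cmd.trim().substring(tokens[0].length()).trim());
    }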

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java?rev=787007&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java Sun Jun 21 13:12:12 2009
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.processors;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+
+
+public class DeleteResourceProcessor implements CommandProcessor {
+
+  public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class.getName());
+  public static final LogHelper console = new LogHelper(LOG);
+
+  public int run(String command) {
+    SessionState ss = SessionState.get();
+    String[] tokens = command.split("\\s+");
+
+    SessionState.ResourceType t;
+    if(tokens.length < 1 || (t = SessionState.find_resource_type(tokens[0])) == null) {
+      console.printError("Usage: delete [" +
+                         StringUtils.join(SessionState.ResourceType.values(),"|") +
+                         "] <value> [<value>]*");
+      return 1;
+    }
+
+    if (tokens.length >= 2) {
+      for(int i = 1; i < tokens.length; i++) {
+        ss.delete_resource(t, tokens[i]);
+      }
+    } else {
+      ss.delete_resource(t);
+    }
+    
+    return 0;
+  }
+}

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java?rev=787007&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java Sun Jun 21 13:12:12 2009
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.processors;
+
+import java.io.PrintStream;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+
+public class DfsProcessor implements CommandProcessor {
+
+  public static final Log LOG = LogFactory.getLog(DfsProcessor.class.getName());
+  public static final LogHelper console = new LogHelper(LOG);
+
+  private FsShell dfs;
+
+  public DfsProcessor(Configuration conf) {
+    dfs = new FsShell(conf);
+  }
+
+  public int run(String command) {
+    String[] tokens = command.split("\\s+");
+
+    try {
+      SessionState ss = SessionState.get();
+      PrintStream oldOut = System.out;
+
+      if(ss != null && ss.out != null) {
+        System.setOut(ss.out);
+      }
+
+      int ret = dfs.run(tokens);
+      if(ret != 0) {
+        console.printError("Command failed with exit code = " + ret);
+      }
+
+      System.setOut(oldOut);
+      return (ret);
+
+    } catch (Exception e) {
+      console.printError("Exception raised from DFSShell.run " + e.getLocalizedMessage(),
+                    org.apache.hadoop.util.StringUtils.stringifyException(e));
+      return 1;
+    }
+  }
+
+}

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (from r787005, hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/SetProcessor.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java&p1=hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/SetProcessor.java&r1=787005&r2=787007&rev=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/SetProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java Sun Jun 21 13:12:12 2009
@@ -16,12 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.cli;
+package org.apache.hadoop.hive.ql.processors;
 
 import java.util.*;
 
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.CommandProcessor;
 
 public class SetProcessor implements CommandProcessor {
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Sun Jun 21 13:12:12 2009
@@ -311,13 +311,53 @@
     }
   }
 
+  public static boolean registerJar(String newJar) {
+    LogHelper console = getConsole();
+    try {
+      Utilities.addToClassPath(StringUtils.split(newJar, ","));
+      console.printInfo("Added " + newJar + " to class path");
+      return true;
+    } catch (Exception e) {
+      console.printError("Unable to register " + newJar + "\nException: " + e.getMessage(),
+                         "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      return false;
+    }
+  }
+  
+  public static boolean unregisterJar(String jarsToUnregister) {
+    LogHelper console = getConsole();
+    try {
+      Utilities.removeFromClassPath(StringUtils.split(jarsToUnregister, ","));
+      console.printInfo("Deleted " + jarsToUnregister + " from class path");
+      return true;
+    } catch (Exception e) {
+      console.printError("Unable to unregister " + jarsToUnregister + "\nException: " + e.getMessage(),
+                         "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      return false;
+    }
+  }
+
   public static interface ResourceHook {
     public String preHook(Set<String> cur, String s);
+    public boolean postHook(Set<String> cur, String s);
   }
 
   public static enum ResourceType {
     FILE(new ResourceHook () {
         public String preHook(Set<String> cur, String s) { return validateFile(cur, s); }
+        public boolean postHook(Set<String> cur, String s) { return true; }
+      }),
+
+    JAR(new ResourceHook () {
+        public String preHook(Set<String> cur, String s) {
+          String newJar = validateFile(cur, s);
+          if(newJar != null) {
+            return (registerJar(newJar) ? newJar : null);
+          } else {
+            return null;
+          }
+        }
+        public boolean postHook(Set<String> cur, String s) { return unregisterJar(s); }
       });
 
     public ResourceHook hook;
@@ -370,6 +410,10 @@
     if(resource_map.get(t) == null) {
       return false;
     }
+    if(t.hook != null) {
+      if(!t.hook.postHook(resource_map.get(t), value))
+        return false;
+    }
     return (resource_map.get(t).remove(value));
   }
 
@@ -392,6 +436,11 @@
   }
 
   public void delete_resource(ResourceType t) {
-    resource_map.remove (t);
+    if(resource_map.get(t) != null) {
+      for(String value : resource_map.get(t)) {
+        delete_resource(t, value);
+      }
+      resource_map.remove (t);
+    }
   }
 }
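
The hook lifecycle, in short (the path is a placeholder; assumes a started
session):

    SessionState ss = SessionState.get();

    // preHook fires: validateFile, then registerJar adds it to the classpath
    ss.add_resource(SessionState.ResourceType.JAR, "/tmp/my_udfs.jar");

    // postHook fires: unregisterJar drops it from the classpath before the
    // entry is removed from the resource map
    ss.delete_resource(SessionState.ResourceType.JAR, "/tmp/my_udfs.jar");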

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/deletejar.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/deletejar.q?rev=787007&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/deletejar.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/deletejar.q Sun Jun 21 13:12:12 2009
@@ -0,0 +1,4 @@
+DROP TABLE DELETEJAR;
+ADD JAR ../data/files/TestSerDe.jar;
+DELETE JAR ../data/files/TestSerDe.jar;
+CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q Sun Jun 21 13:12:12 2009
@@ -11,6 +11,7 @@
 alter table alter1 set serdeproperties('s1'='10', 's2' ='20');
 describe extended alter1;
 
+add jar ../data/files/TestSerDe.jar;
 alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9');
 describe extended alter1;
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q Sun Jun 21 13:12:12 2009
@@ -1,5 +1,6 @@
 -- TestSerDe is a user defined serde where the default delimiter is Ctrl-B
 DROP TABLE INPUT16;
+ADD JAR ../data/files/TestSerDe.jar;
 CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16;
 SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q Sun Jun 21 13:12:12 2009
@@ -1,6 +1,7 @@
 -- TestSerDe is a user defined serde where the default delimiter is Ctrl-B
 -- the user is overwriting it with ctrlC
 DROP TABLE INPUT16_CC;
+ADD JAR ../data/files/TestSerDe.jar;
 CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'  with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC;
 SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC;

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out?rev=787007&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out Sun Jun 21 13:12:12 2009
@@ -0,0 +1,4 @@
+query: DROP TABLE DELETEJAR
+query: CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE
+FAILED: Error in metadata: Cannot validate serde: org.apache.hadoop.hive.serde2.TestSerDe
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Sun Jun 21 13:12:12 2009
@@ -20,6 +20,7 @@
 
 import java.util.*;
 
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -55,7 +56,7 @@
         c = serdes.get(name);
     } else {
       try {
-        c = Class.forName(name);
+        c = Class.forName(name, true, JavaUtils.getClassLoader());
       } catch(ClassNotFoundException e) {
         throw new SerDeException("SerDe " + name + " does not exist");
       }

Modified: hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=787007&r1=787006&r2=787007&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Sun Jun 21 13:12:12 2009
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.service;
 
+import java.io.PrintStream;
 import java.util.List;
 import java.util.Map;
 import java.util.Vector;
@@ -42,12 +43,18 @@
 import com.facebook.fb303.FacebookService;
 import com.facebook.fb303.fb_status;
 import com.facebook.thrift.TException;
+import com.facebook.thrift.TProcessor;
+import com.facebook.thrift.TProcessorFactory;
 import com.facebook.thrift.protocol.TBinaryProtocol;
 import com.facebook.thrift.server.TServer;
 import com.facebook.thrift.server.TThreadPoolServer;
 import com.facebook.thrift.transport.TServerSocket;
 import com.facebook.thrift.transport.TServerTransport;
+import com.facebook.thrift.transport.TTransport;
 import com.facebook.thrift.transport.TTransportFactory;
+
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.hadoop.hive.metastore.*;
@@ -64,12 +71,9 @@
    * to get an embedded server
    */
   public static class HiveServerHandler extends HiveMetaStore.HMSHandler implements HiveInterface {
-
     /**
      * Hive server uses org.apache.hadoop.hive.ql.Driver for run() and 
      * getResults() methods.
-     * TODO: There should be one Driver object per query statement executed
-     * TODO: That will allow clients to run multiple queries simulteneously
      */
     private Driver driver;
 
@@ -85,9 +89,9 @@
      */
     public HiveServerHandler() throws MetaException {
       super(HiveServer.class.getName());
-      session = new SessionState(new HiveConf(SessionState.class));
+
+      SessionState session = new SessionState(new HiveConf(SessionState.class));
       SessionState.start(session);
-      HiveConf conf = session.get().getConf();
       session.in = null;
       session.out = null;
       session.err = null;
@@ -99,17 +103,30 @@
      *
      * @param query HiveQL query to execute
      */
-    public void execute(String query) throws HiveServerException, TException {
-      HiveServerHandler.LOG.info("Running the query: " + query);
-      int rc = 0;
-      // TODO: driver.run should either return int or throw exception, not both.
+    public void execute(String cmd) throws HiveServerException, TException {
+      HiveServerHandler.LOG.info("Running the query: " + cmd);
+      SessionState ss = SessionState.get();
+      
+      String cmd_trimmed = cmd.trim();
+      String[] tokens = cmd_trimmed.split("\\s+");
+      String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
+      
+      int ret = 0;
       try {
-        rc = driver.run(query);
+        CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
+        if(proc != null) {
+          if (proc instanceof Driver) {
+            ret = driver.run(cmd);
+          } else {
+            ret = proc.run(cmd_1);
+          }
+        }
       } catch (Exception e) {
         throw new HiveServerException("Error running query: " + e.toString());
       }
-      if (rc != 0) {
-        throw new HiveServerException("Query returned non-zero code: " + rc);
+
+      if (ret != 0) {
+        throw new HiveServerException("Query returned non-zero code: " + ret);
       }
     }
 
@@ -196,6 +213,21 @@
       return VERSION;
     }
   }
+	
+  public static class ThriftHiveProcessorFactory extends TProcessorFactory {
+    public ThriftHiveProcessorFactory (TProcessor processor) {
+      super(processor);
+    }
+
+    public TProcessor getProcessor(TTransport trans) {
+      try {
+        Iface handler = new HiveServerHandler();
+        return new ThriftHive.Processor(handler);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
 
   public static void main(String[] args) {
     try {
@@ -204,10 +236,9 @@
         port = Integer.parseInt(args[0]);
       }
       TServerTransport serverTransport = new TServerSocket(port);
-      Iface handler = new HiveServerHandler();
-      FacebookService.Processor processor = new ThriftHive.Processor(handler);
+      ThriftHiveProcessorFactory hfactory = new ThriftHiveProcessorFactory(null);
       TThreadPoolServer.Options options = new TThreadPoolServer.Options();
-      TServer server = new TThreadPoolServer(processor, serverTransport,
+      TServer server = new TThreadPoolServer(hfactory, serverTransport,
           new TTransportFactory(), new TTransportFactory(),
           new TBinaryProtocol.Factory(), new TBinaryProtocol.Factory(), options);
       server.serve();
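
With the processor factory in place, each Thrift connection gets its own
HiveServerHandler (and hence its own SessionState), so concurrent clients no
longer share a Driver. A rough client-side sketch; the generated client and
transport details are assumptions based on this era of the code:

    TSocket transport = new TSocket("localhost", 10000);
    ThriftHive.Client client = new ThriftHive.Client(new TBinaryProtocol(transport));
    transport.open();
    client.execute("add jar /tmp/my_udfs.jar");  // cli commands via hiveserver
    client.execute("select count(1) from src");
    transport.close();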