You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ha...@apache.org on 2014/05/27 03:26:52 UTC

svn commit: r1597693 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/ java/org/apache/hadoop/hive/ql/processors/ java/org/apache/hadoop/hive/ql/session/ test/org/apache/hadoop/hive/ql/session/ test/results/clientnegative/

Author: hashutosh
Date: Tue May 27 01:26:52 2014
New Revision: 1597693

URL: http://svn.apache.org/r1597693
Log:
HIVE-3907 : Hive should support adding multiple resources at once (Navis via Ashutosh Chauhan)

Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
    hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1597693&r1=1597692&r2=1597693&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue May 27 01:26:52 2014
@@ -2452,9 +2452,6 @@ public class SemanticAnalyzer extends Ba
 
     if (SessionState.canDownloadResource(progName)) {
       String filePath = ss.add_resource(ResourceType.FILE, progName, true);
-      if (filePath == null) {
-        throw new RuntimeException("Could not download the resource: " + progName);
-      }
       Path p = new Path(filePath);
       String fileName = p.getName();
       String scriptArgs = getScriptArgs(cmd);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java?rev=1597693&r1=1597692&r2=1597693&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java Tue May 27 01:26:52 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.lang.builder.HashCodeBuilder;
@@ -236,15 +237,19 @@ public class ExprNodeGenericFuncDesc ext
 
     if (requiredJars != null) {
       SessionState.ResourceType t = SessionState.find_resource_type("JAR");
-      for (String jarPath : requiredJars) {
-        ss.add_resource(t, jarPath);
+      try {
+        ss.add_resources(t, Arrays.asList(requiredJars));
+      } catch (Exception e) {
+        throw new UDFArgumentException(e);
       }
     }
 
     if (requiredFiles != null) {
       SessionState.ResourceType t = SessionState.find_resource_type("FILE");
-      for (String filePath : requiredFiles) {
-        ss.add_resource(t, filePath);
+      try {
+        ss.add_resources(t, Arrays.asList(requiredFiles));
+      } catch (Exception e) {
+        throw new UDFArgumentException(e);
       }
     }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java?rev=1597693&r1=1597692&r2=1597693&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java Tue May 27 01:26:52 2014
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
+import java.util.Arrays;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -50,14 +52,12 @@ public class AddResourceProcessor implem
           + "] <value> [<value>]*");
       return new CommandProcessorResponse(1);
     }
-    for (int i = 1; i < tokens.length; i++) {
-      String resourceFile = ss.add_resource(t, tokens[i]);
-      if(resourceFile == null){
-        String errMsg = tokens[i]+" does not exist.";
-        return new CommandProcessorResponse(1,errMsg,null);
-      }
+    try {
+      ss.add_resources(t,
+          Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
+    } catch (Exception e) {
+      return new CommandProcessorResponse(1, e.getMessage(), null);
     }
-
     return new CommandProcessorResponse(0);
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java?rev=1597693&r1=1597692&r2=1597693&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java Tue May 27 01:26:52 2014
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
+import java.util.Arrays;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -52,11 +54,9 @@ public class DeleteResourceProcessor imp
     }
 
     if (tokens.length >= 2) {
-      for (int i = 1; i < tokens.length; i++) {
-        ss.delete_resource(t, tokens[i]);
-      }
+      ss.delete_resources(t, Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
     } else {
-      ss.delete_resource(t);
+      ss.delete_resources(t);
     }
 
     return new CommandProcessorResponse(0);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1597693&r1=1597692&r2=1597693&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Tue May 27 01:26:52 2014
@@ -26,6 +26,7 @@ import java.io.PrintStream;
 import java.net.URI;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -560,26 +561,6 @@ public class SessionState {
     return _console;
   }
 
-  public static String validateFile(Set<String> curFiles, String newFile) {
-    SessionState ss = SessionState.get();
-    LogHelper console = getConsole();
-    Configuration conf = (ss == null) ? new Configuration() : ss.getConf();
-
-    try {
-      if (Utilities.realFile(newFile, conf) != null) {
-        return newFile;
-      } else {
-        console.printError(newFile + " does not exist");
-        return null;
-      }
-    } catch (IOException e) {
-      console.printError("Unable to validate " + newFile + "\nException: "
-          + e.getMessage(), "\n"
-              + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return null;
-    }
-  }
-
   /**
    *
    * @return username from current SessionState authenticator. username will be
@@ -593,27 +574,42 @@ public class SessionState {
     return null;
   }
 
-  public static boolean registerJar(String newJar) {
+  static void validateFiles(List<String> newFiles) throws IllegalArgumentException {
+    SessionState ss = SessionState.get();
+    Configuration conf = (ss == null) ? new Configuration() : ss.getConf();
+
+    LogHelper console = getConsole();
+    for (String newFile : newFiles) {
+      try {
+        if (Utilities.realFile(newFile, conf) == null) {
+          String message = newFile + " does not exist";
+          throw new IllegalArgumentException(message);
+        }
+      } catch (IOException e) {
+        String message = "Unable to validate " + newFile;
+        throw new IllegalArgumentException(message, e);
+      }
+    }
+  }
+
+  static void registerJars(List<String> newJars) throws IllegalArgumentException {
     LogHelper console = getConsole();
     try {
       ClassLoader loader = Thread.currentThread().getContextClassLoader();
-      ClassLoader newLoader = Utilities.addToClassPath(loader, StringUtils.split(newJar, ","));
+      ClassLoader newLoader = Utilities.addToClassPath(loader, newJars.toArray(new String[0]));
       Thread.currentThread().setContextClassLoader(newLoader);
       SessionState.get().getConf().setClassLoader(newLoader);
-      console.printInfo("Added " + newJar + " to class path");
-      return true;
+      console.printInfo("Added " + newJars + " to class path");
     } catch (Exception e) {
-      console.printError("Unable to register " + newJar + "\nException: "
-          + e.getMessage(), "\n"
-              + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return false;
+      String message = "Unable to register " + newJars;
+      throw new IllegalArgumentException(message, e);
     }
   }
 
-  public static boolean unregisterJar(String jarsToUnregister) {
+  static boolean unregisterJar(List<String> jarsToUnregister) {
     LogHelper console = getConsole();
     try {
-      Utilities.removeFromClassPath(StringUtils.split(jarsToUnregister, ","));
+      Utilities.removeFromClassPath(jarsToUnregister.toArray(new String[0]));
       console.printInfo("Deleted " + jarsToUnregister + " from class path");
       return true;
     } catch (Exception e) {
@@ -625,65 +621,29 @@ public class SessionState {
   }
 
   /**
-   * ResourceHook.
-   *
-   */
-  public static interface ResourceHook {
-    String preHook(Set<String> cur, String s);
-
-    boolean postHook(Set<String> cur, String s);
-  }
-
-  /**
    * ResourceType.
    *
    */
   public static enum ResourceType {
-    FILE(new ResourceHook() {
-      @Override
-      public String preHook(Set<String> cur, String s) {
-        return validateFile(cur, s);
-      }
-
-      @Override
-      public boolean postHook(Set<String> cur, String s) {
-        return true;
-      }
-    }),
-
-    JAR(new ResourceHook() {
-      @Override
-      public String preHook(Set<String> cur, String s) {
-        String newJar = validateFile(cur, s);
-        if (newJar != null) {
-          return (registerJar(newJar) ? newJar : null);
-        } else {
-          return null;
-        }
-      }
-
-      @Override
-      public boolean postHook(Set<String> cur, String s) {
-        return unregisterJar(s);
-      }
-    }),
+    FILE,
 
-    ARCHIVE(new ResourceHook() {
+    JAR {
       @Override
-      public String preHook(Set<String> cur, String s) {
-        return validateFile(cur, s);
+      public void preHook(Set<String> cur, List<String> s) throws IllegalArgumentException {
+        super.preHook(cur, s);
+        registerJars(s);
       }
-
       @Override
-      public boolean postHook(Set<String> cur, String s) {
-        return true;
+      public void postHook(Set<String> cur, List<String> s) {
+        unregisterJar(s);
       }
-    });
+    },
+    ARCHIVE;
 
-    public ResourceHook hook;
-
-    ResourceType(ResourceHook hook) {
-      this.hook = hook;
+    public void preHook(Set<String> cur, List<String> s) throws IllegalArgumentException {
+      validateFiles(s);
+    }
+    public void postHook(Set<String> cur, List<String> s) {
     }
   };
 
@@ -713,33 +673,47 @@ public class SessionState {
   private final HashMap<ResourceType, Set<String>> resource_map =
       new HashMap<ResourceType, Set<String>>();
 
-  public String add_resource(ResourceType t, String value) {
-    // By default don't convert to unix
+  public String add_resource(ResourceType t, String value) throws RuntimeException {
     return add_resource(t, value, false);
   }
 
-  public String add_resource(ResourceType t, String value, boolean convertToUnix) {
-    try {
-      value = downloadResource(value, convertToUnix);
-    } catch (Exception e) {
-      getConsole().printError(e.getMessage());
+  public String add_resource(ResourceType t, String value, boolean convertToUnix)
+      throws RuntimeException {
+    List<String> added = add_resources(t, Arrays.asList(value), convertToUnix);
+    if (added == null || added.isEmpty()) {
       return null;
     }
+    return added.get(0);
+  }
 
+  public List<String> add_resources(ResourceType t, List<String> values)
+      throws RuntimeException {
+    // By default don't convert to unix
+    return add_resources(t, values, false);
+  }
+
+  public List<String> add_resources(ResourceType t, List<String> values, boolean convertToUnix)
+      throws RuntimeException {
     Set<String> resourceMap = getResourceMap(t);
 
-    String fnlVal = value;
-    if (t.hook != null) {
-      fnlVal = t.hook.preHook(resourceMap, value);
-      if (fnlVal == null) {
-        return fnlVal;
+    List<String> localized = new ArrayList<String>();
+    try {
+      for (String value : values) {
+        localized.add(downloadResource(value, convertToUnix));
       }
+
+      t.preHook(resourceMap, localized);
+
+    } catch (RuntimeException e) {
+      getConsole().printError(e.getMessage(), "\n"
+          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      throw e;
     }
-    getConsole().printInfo("Added resource: " + fnlVal);
-    resourceMap.add(fnlVal);
 
-    addedResource = true;
-    return fnlVal;
+    getConsole().printInfo("Added resources: " + values);
+    resourceMap.addAll(localized);
+
+    return localized;
   }
 
   public void add_builtin_resource(ResourceType t, String value) {
@@ -799,16 +773,12 @@ public class SessionState {
     return value;
   }
 
-  public boolean delete_resource(ResourceType t, String value) {
-    if (resource_map.get(t) == null) {
-      return false;
+  public void delete_resources(ResourceType t, List<String> value) {
+    Set<String> resources = resource_map.get(t);
+    if (resources != null && !resources.isEmpty()) {
+      t.postHook(resources, value);
+      resources.removeAll(value);
     }
-    if (t.hook != null) {
-      if (!t.hook.postHook(resource_map.get(t), value)) {
-        return false;
-      }
-    }
-    return (resource_map.get(t).remove(value));
   }
 
   public Set<String> list_resource(ResourceType t, List<String> filter) {
@@ -829,11 +799,10 @@ public class SessionState {
     }
   }
 
-  public void delete_resource(ResourceType t) {
-    if (resource_map.get(t) != null) {
-      for (String value : resource_map.get(t)) {
-        delete_resource(t, value);
-      }
+  public void delete_resources(ResourceType t) {
+    Set<String> resources = resource_map.get(t);
+    if (resources != null && !resources.isEmpty()) {
+      delete_resources(t, new ArrayList<String>(resources));
       resource_map.remove(t);
     }
   }

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java?rev=1597693&r1=1597692&r2=1597693&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java Tue May 27 01:26:52 2014
@@ -101,7 +101,7 @@ public class TestSessionState {
 
     public void run() {
       SessionState.start(ss);
-      SessionState.registerJar(jar);
+      SessionState.registerJars(Arrays.asList(jar));
       loader = Thread.currentThread().getContextClassLoader();
     }
   }

Modified: hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out?rev=1597693&r1=1597692&r2=1597693&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out Tue May 27 01:26:52 2014
@@ -2,4 +2,4 @@ PREHOOK: query: create function lookup a
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:default
 nonexistent_file.txt does not exist
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Unable to load FILE nonexistent_file.txt
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. nonexistent_file.txt does not exist