Posted to commits@hive.apache.org by gu...@apache.org on 2013/10/01 06:48:48 UTC

svn commit: r1527883 [2/6] - in /hive/branches/tez: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ bin/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base...

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java Tue Oct  1 04:48:44 2013
@@ -87,6 +87,7 @@ public class AppConfig extends Configura
   public static final String HADOOP_NAME         = "templeton.hadoop";
   public static final String HADOOP_CONF_DIR     = "templeton.hadoop.conf.dir";
   public static final String HCAT_NAME           = "templeton.hcat";
+  public static final String PYTHON_NAME         = "templeton.python";
   public static final String HIVE_ARCHIVE_NAME   = "templeton.hive.archive";
   public static final String HIVE_PATH_NAME      = "templeton.hive.path";
   public static final String HIVE_PROPS_NAME     = "templeton.hive.properties";
@@ -181,6 +182,7 @@ public class AppConfig extends Configura
   public String hadoopQueueName()  { return get(HADOOP_QUEUE_NAME); }
   public String clusterHadoop()    { return get(HADOOP_NAME); }
   public String clusterHcat()      { return get(HCAT_NAME); }
+  public String clusterPython()    { return get(PYTHON_NAME); }
   public String pigPath()          { return get(PIG_PATH_NAME); }
   public String pigArchive()       { return get(PIG_ARCHIVE_NAME); }
   public String hivePath()         { return get(HIVE_PATH_NAME); }

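The new templeton.python property gives WebHCat a configurable Python interpreter for running hcat.py on Windows. As a hedged sketch (ProgramPicker is a hypothetical helper, not part of this commit), the dispatch that HcatDelegator performs further down can be expressed as:

    import org.apache.hive.hcatalog.templeton.AppConfig;

    // Hypothetical helper, not part of the commit; AppConfig is assumed to be
    // initialized elsewhere with the webhcat configuration.
    public class ProgramPicker {
      /** Pick the executable: the python interpreter when hcat is a .py script. */
      public static String pick(AppConfig appConf) {
        String hcat = appConf.clusterHcat();      // e.g. "hcat" or "hcat.py"
        return hcat.toLowerCase().endsWith(".py")
            ? appConf.clusterPython()             // templeton.python, e.g. "python"
            : hcat;
      }
    }
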
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java Tue Oct  1 04:48:44 2013
@@ -18,12 +18,18 @@
  */
 package org.apache.hive.hcatalog.templeton;
 
+import java.io.BufferedReader;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.PrintWriter;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.concurrent.Semaphore;
 
 import org.apache.commons.exec.CommandLine;
@@ -33,6 +39,38 @@ import org.apache.commons.exec.ExecuteWa
 import org.apache.commons.exec.PumpStreamHandler;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.Shell;
+
+class StreamOutputWriter extends Thread
+{
+  InputStream is;
+  String type;
+  PrintWriter out;
+
+  StreamOutputWriter(InputStream is, String type, OutputStream outStream)
+  {
+    this.is = is;
+    this.type = type;
+    this.out = new PrintWriter(outStream, true);
+  }
+
+  @Override
+  public void run()
+  {
+    try
+    {
+      BufferedReader br =
+        new BufferedReader(new InputStreamReader(is));
+      String line = null;
+      while ( (line = br.readLine()) != null){
+        out.println(line);
+      }
+    } catch (IOException ioe)
+    {
+      ioe.printStackTrace();
+    }
+  }
+}
 
 /**
  * Execute a local program.  This is a singleton service that will
@@ -45,6 +83,9 @@ public class ExecServiceImpl implements 
 
   private static volatile ExecServiceImpl theSingleton;
 
+  /** Windows CreateProcess synchronization object */
+  private static final Object WindowsProcessLaunchLock = new Object();
+
   /**
    * Retrieve the singleton.
    */
@@ -133,7 +174,54 @@ public class ExecServiceImpl implements 
 
     LOG.info("Running: " + cmd);
     ExecBean res = new ExecBean();
-    res.exitcode = executor.execute(cmd, execEnv(env));
+
+    if(Shell.WINDOWS){
+      // The default executor sometimes causes failures on Windows: the hcat
+      // command occasionally returns a non-zero exit status with it, apparently
+      // due to race conditions hit during process launch on Windows.
+      env = execEnv(env);
+      String[] envVals = new String[env.size()];
+      int i=0;
+      for( Entry<String, String> kv : env.entrySet()){
+        envVals[i++] = kv.getKey() + "=" + kv.getValue();
+        LOG.info("Setting " +  kv.getKey() + "=" + kv.getValue());
+      }
+
+      Process proc;
+      synchronized (WindowsProcessLaunchLock) {
+        // To work around the race condition issue with child processes
+        // inheriting unintended handles during process launch that can
+        // lead to hangs on reading output and error streams, we
+        // serialize process creation. More info available at:
+        // http://support.microsoft.com/kb/315939
+        proc = Runtime.getRuntime().exec(cmd.toStrings(), envVals);
+      }
+
+      //consume stderr
+      StreamOutputWriter errorGobbler = new
+        StreamOutputWriter(proc.getErrorStream(), "ERROR", errStream);
+
+      //consume stdout
+      StreamOutputWriter outputGobbler = new
+        StreamOutputWriter(proc.getInputStream(), "OUTPUT", outStream);
+
+      //start collecting input streams
+      errorGobbler.start();
+      outputGobbler.start();
+      //execute
+      try{
+        res.exitcode = proc.waitFor();
+      } catch (InterruptedException e) {
+        throw new IOException(e);
+      }
+      //flush
+      errorGobbler.out.flush();
+      outputGobbler.out.flush();
+    }
+    else {
+      res.exitcode = executor.execute(cmd, execEnv(env));
+    }
+
     String enc = appConf.get(AppConfig.EXEC_ENCODING_NAME);
     res.stdout = outStream.toString(enc);
     res.stderr = errStream.toString(enc);

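The Windows branch above combines two general techniques: serializing Runtime.exec() behind a shared lock to dodge the handle-inheritance race (KB 315939), and draining stdout and stderr on separate threads so the child process never blocks on a full pipe buffer. A self-contained sketch of the same pattern, with placeholder names (LaunchSketch, Drainer) and a join() where the commit uses flush():

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    // Standalone sketch of the launch-and-drain pattern; the command line and
    // environment are placeholders, and Drainer plays the role of
    // StreamOutputWriter. Joining the drainers before returning is a slightly
    // stricter variant of the flush used in the commit.
    public class LaunchSketch {
      private static final Object launchLock = new Object();

      static class Drainer extends Thread {
        final InputStream in;
        final ByteArrayOutputStream sink = new ByteArrayOutputStream();
        Drainer(InputStream in) { this.in = in; }
        @Override public void run() {
          byte[] buf = new byte[4096];
          try {
            int n;
            while ((n = in.read(buf)) != -1) {
              sink.write(buf, 0, n);      // never let the pipe buffer fill up
            }
          } catch (IOException ignored) {
          }
        }
      }

      public static int runAndDrain(String[] cmd, String[] env)
          throws IOException, InterruptedException {
        Process proc;
        synchronized (launchLock) {       // serialize process creation (KB 315939)
          proc = Runtime.getRuntime().exec(cmd, env);
        }
        Drainer out = new Drainer(proc.getInputStream());
        Drainer err = new Drainer(proc.getErrorStream());
        out.start();
        err.start();
        int exit = proc.waitFor();
        out.join();                       // wait for the drainers to finish
        err.join();
        return exit;
      }
    }
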
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java Tue Oct  1 04:48:44 2013
@@ -67,7 +67,11 @@ public class HcatDelegator extends Launc
       Map<String, String> env = TempletonUtils.hadoopUserEnv(user, cp);
       proxy.addEnv(env);
       proxy.addArgs(args);
-      return execService.run(appConf.clusterHcat(), args, env);
+      if (appConf.clusterHcat().toLowerCase().endsWith(".py")) {
+        return execService.run(appConf.clusterPython(), args, env);
+      } else {
+        return execService.run(appConf.clusterHcat(), args, env);
+      }
     } catch (InterruptedException e) {
       throw new IOException(e);
     } finally {
@@ -79,8 +83,12 @@ public class HcatDelegator extends Launc
   private List<String> makeArgs(String exec, boolean format,
                   String group, String permissions) {
     ArrayList<String> args = new ArrayList<String>();
+    if (appConf.clusterHcat().toLowerCase().endsWith(".py")) {
+      // hcat.py becomes the first argument passed to the "python" command
+      args.add(appConf.clusterHcat());
+    }
     args.add("-e");
-    args.add(exec);
+    args.add('"' + exec + '"');
     if (TempletonUtils.isset(group)) {
       args.add("-g");
       args.add(group);

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java Tue Oct  1 04:48:44 2013
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -40,7 +41,7 @@ public class HiveDelegator extends Launc
     super(appConf);
   }
 
-  public EnqueueBean run(String user,
+  public EnqueueBean run(String user, Map<String, Object> userArgs,
                String execute, String srcFile, List<String> defines,
                List<String> hiveArgs, String otherFiles,
                String statusdir, String callback, String completedUrl, boolean enablelog)
@@ -51,7 +52,7 @@ public class HiveDelegator extends Launc
     List<String> args = makeArgs(execute, srcFile, defines, hiveArgs, otherFiles, statusdir,
                    completedUrl, enablelog);
 
-    return enqueueController(user, callback, args);
+    return enqueueController(user, userArgs, callback, args);
   }
 
   private List<String> makeArgs(String execute, String srcFile,
@@ -63,6 +64,7 @@ public class HiveDelegator extends Launc
     try {
       args.addAll(makeBasicArgs(execute, srcFile, otherFiles, statusdir, completedUrl, enablelog));
       args.add("--");
+      TempletonUtils.addCmdForWindows(args);
       args.add(appConf.hivePath());
 
       args.add("--service");
@@ -74,16 +76,18 @@ public class HiveDelegator extends Launc
 
       for (String prop : appConf.getStrings(AppConfig.HIVE_PROPS_NAME)) {
         args.add("--hiveconf");
-        args.add(prop);
+        args.add(TempletonUtils.quoteForWindows(prop));
       }
       for (String prop : defines) {
         args.add("--hiveconf");
-        args.add(prop);
+        args.add(TempletonUtils.quoteForWindows(prop));
+      }
+      for (String hiveArg : hiveArgs) {
+        args.add(TempletonUtils.quoteForWindows(hiveArg));
       }
-      args.addAll(hiveArgs);
       if (TempletonUtils.isset(execute)) {
         args.add("-e");
-        args.add(execute);
+        args.add(TempletonUtils.quoteForWindows(execute));
       } else if (TempletonUtils.isset(srcFile)) {
         args.add("-f");
         args.add(TempletonUtils.hadoopFsPath(srcFile, appConf, runAs)
@@ -119,8 +123,11 @@ public class HiveDelegator extends Launc
     args.addAll(makeLauncherArgs(appConf, statusdir, completedUrl, allFiles,
                 enablelog, JobType.HIVE));
 
-    args.add("-archives");
-    args.add(appConf.hiveArchive());
+    if (appConf.hiveArchive() != null && !appConf.hiveArchive().equals(""))
+    {
+      args.add("-archives");
+      args.add(appConf.hiveArchive());
+    }
 
     return args;
   }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java Tue Oct  1 04:48:44 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -38,7 +39,7 @@ public class JarDelegator extends Launch
     super(appConf);
   }
 
-  public EnqueueBean run(String user, String jar, String mainClass,
+  public EnqueueBean run(String user, Map<String, Object> userArgs, String jar, String mainClass,
                String libjars, String files,
                List<String> jarArgs, List<String> defines,
                String statusdir, String callback, String completedUrl,
@@ -50,7 +51,7 @@ public class JarDelegator extends Launch
       libjars, files, jarArgs, defines,
       statusdir, completedUrl, enablelog, jobType);
 
-    return enqueueController(user, callback, args);
+    return enqueueController(user, userArgs, callback, args);
   }
 
   private List<String> makeArgs(String jar, String mainClass,
@@ -67,28 +68,34 @@ public class JarDelegator extends Launch
       args.addAll(makeLauncherArgs(appConf, statusdir,
         completedUrl, allFiles, enablelog, jobType));
       args.add("--");
+      TempletonUtils.addCmdForWindows(args);
       args.add(appConf.clusterHadoop());
       args.add("jar");
       args.add(TempletonUtils.hadoopFsPath(jar, appConf, runAs).getName());
       if (TempletonUtils.isset(mainClass))
         args.add(mainClass);
       if (TempletonUtils.isset(libjars)) {
+        String libjarsListAsString =
+            TempletonUtils.hadoopFsListAsString(libjars, appConf, runAs);
         args.add("-libjars");
-        args.add(TempletonUtils.hadoopFsListAsString(libjars, appConf,
-          runAs));
+        args.add(TempletonUtils.quoteForWindows(libjarsListAsString));
       }
       if (TempletonUtils.isset(files)) {
+        String filesListAsString =
+            TempletonUtils.hadoopFsListAsString(files, appConf, runAs);
         args.add("-files");
-        args.add(TempletonUtils.hadoopFsListAsString(files, appConf,
-          runAs));
+        args.add(TempletonUtils.quoteForWindows(filesListAsString));
       }
       //the token file location comes after mainClass, as a -Dprop=val
       args.add("-D" + TempletonControllerJob.TOKEN_FILE_ARG_PLACEHOLDER);
 
-      for (String d : defines)
-        args.add("-D" + d);
-
-      args.addAll(jarArgs);
+      for (String d : defines) {
+        args.add("-D");
+        TempletonUtils.quoteForWindows(d);
+      }
+      for (String arg : jarArgs) {
+        args.add(TempletonUtils.quoteForWindows(arg));
+      }
     } catch (FileNotFoundException e) {
       throw new BadParam(e.getMessage());
     } catch (URISyntaxException e) {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java Tue Oct  1 04:48:44 2013
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 import org.apache.commons.logging.Log;
@@ -49,13 +50,15 @@ public class LauncherDelegator extends T
     super(appConf);
   }
 
-  public void registerJob(String id, String user, String callback)
+  public void registerJob(String id, String user, String callback,
+      Map<String, Object> userArgs)
     throws IOException {
     JobState state = null;
     try {
       state = new JobState(id, Main.getAppConfigInstance());
       state.setUser(user);
       state.setCallback(callback);
+      state.setUserArgs(userArgs);
     } finally {
       if (state != null)
         state.close();
@@ -65,7 +68,7 @@ public class LauncherDelegator extends T
   /**
    * Enqueue the TempletonControllerJob directly calling doAs.
    */
-  public EnqueueBean enqueueController(String user, String callback,
+  public EnqueueBean enqueueController(String user, Map<String, Object> userArgs, String callback,
                      List<String> args)
     throws NotAuthorizedException, BusyException, ExecuteException,
     IOException, QueueException {
@@ -82,7 +85,7 @@ public class LauncherDelegator extends T
       if (id == null)
         throw new QueueException("Unable to get job id");
 
-      registerJob(id, user, callback);
+      registerJob(id, user, callback, userArgs);
 
       return new EnqueueBean(id);
     } catch (InterruptedException e) {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java Tue Oct  1 04:48:44 2013
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -39,7 +40,7 @@ public class PigDelegator extends Launch
     super(appConf);
   }
 
-  public EnqueueBean run(String user,
+  public EnqueueBean run(String user, Map<String, Object> userArgs,
                String execute, String srcFile,
                List<String> pigArgs, String otherFiles,
                String statusdir, String callback, String completedUrl, boolean enablelog)
@@ -50,7 +51,7 @@ public class PigDelegator extends Launch
       srcFile, pigArgs,
       otherFiles, statusdir, completedUrl, enablelog);
 
-    return enqueueController(user, callback, args);
+    return enqueueController(user, userArgs, callback, args);
   }
 
   private List<String> makeArgs(String execute, String srcFile,
@@ -69,18 +70,24 @@ public class PigDelegator extends Launch
       }
 
       args.addAll(makeLauncherArgs(appConf, statusdir, completedUrl, allFiles, enablelog, JobType.PIG));
-      args.add("-archives");
-      args.add(appConf.pigArchive());
+      if (appConf.pigArchive() != null && !appConf.pigArchive().equals(""))
+      {
+        args.add("-archives");
+        args.add(appConf.pigArchive());
+      }
 
       args.add("--");
+      TempletonUtils.addCmdForWindows(args);
       args.add(appConf.pigPath());
       //the token file location should be first argument of pig
       args.add("-D" + TempletonControllerJob.TOKEN_FILE_ARG_PLACEHOLDER);
 
-      args.addAll(pigArgs);
+      for (String pigArg : pigArgs) {
+        args.add(TempletonUtils.quoteForWindows(pigArg));
+      }
       if (TempletonUtils.isset(execute)) {
         args.add("-execute");
-        args.add(execute);
+        args.add(TempletonUtils.quoteForWindows(execute));
       } else if (TempletonUtils.isset(srcFile)) {
         args.add("-file");
         args.add(TempletonUtils.hadoopFsPath(srcFile, appConf, runAs)

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java Tue Oct  1 04:48:44 2013
@@ -19,6 +19,7 @@
 package org.apache.hive.hcatalog.templeton;
 
 import java.io.IOException;
+import java.util.Map;
 
 import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.JobProfile;
@@ -38,6 +39,7 @@ public class QueueStatusBean {
   public String user;
   public String callback;
   public String completed;
+  public Map<String, Object> userargs;
 
   public QueueStatusBean() {
   }
@@ -63,5 +65,6 @@ public class QueueStatusBean {
     user = state.getUser();
     callback = state.getCallback();
     completed = state.getCompleteStatus();
+    userargs = state.getUserArgs();
   }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Tue Oct  1 04:48:44 2013
@@ -587,7 +587,9 @@ public class Server {
                       @FormParam("output") String output,
                       @FormParam("mapper") String mapper,
                       @FormParam("reducer") String reducer,
-                      @FormParam("file") List<String> files,
+                      @FormParam("combiner") String combiner,
+                      @FormParam("file") List<String> fileList,
+                      @FormParam("files") String files,
                       @FormParam("define") List<String> defines,
                       @FormParam("cmdenv") List<String> cmdenvs,
                       @FormParam("arg") List<String> args,
@@ -601,11 +603,26 @@ public class Server {
     verifyParam(mapper, "mapper");
     verifyParam(reducer, "reducer");
     
+    Map<String, Object> userArgs = new HashMap<String, Object>();
+    userArgs.put("user.name", getDoAsUser());
+    userArgs.put("input", inputs);
+    userArgs.put("output", output);
+    userArgs.put("mapper", mapper);
+    userArgs.put("reducer", reducer);
+    userArgs.put("combiner", combiner);
+    userArgs.put("file",  fileList);
+    userArgs.put("files",  files);
+    userArgs.put("define",  defines);
+    userArgs.put("cmdenv",  cmdenvs);
+    userArgs.put("arg",  args);
+    userArgs.put("statusdir", statusdir);
+    userArgs.put("callback", callback);
+    userArgs.put("enablelog", Boolean.toString(enablelog));
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     StreamingDelegator d = new StreamingDelegator(appConf);
-    return d.run(getDoAsUser(), inputs, output, mapper, reducer,
-      files, defines, cmdenvs, args,
+    return d.run(getDoAsUser(), userArgs, inputs, output, mapper, reducer, combiner,
+      fileList, files, defines, cmdenvs, args,
       statusdir, callback, getCompletedUrl(), enablelog, JobType.STREAMING);
   }
 
@@ -630,10 +647,22 @@ public class Server {
     verifyParam(jar, "jar");
     verifyParam(mainClass, "class");
     
+    Map<String, Object> userArgs = new HashMap<String, Object>();
+    userArgs.put("user.name", getDoAsUser());
+    userArgs.put("jar", jar);
+    userArgs.put("class", mainClass);
+    userArgs.put("libjars", libjars);
+    userArgs.put("files", files);
+    userArgs.put("arg", args);
+    userArgs.put("define", defines);
+    userArgs.put("statusdir", statusdir);
+    userArgs.put("callback", callback);
+    userArgs.put("enablelog", Boolean.toString(enablelog));
+
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     JarDelegator d = new JarDelegator(appConf);
-    return d.run(getDoAsUser(),
+    return d.run(getDoAsUser(), userArgs,
       jar, mainClass,
       libjars, files, args, defines,
       statusdir, callback, getCompletedUrl(), enablelog, JobType.JAR);
@@ -658,10 +687,21 @@ public class Server {
     if (execute == null && srcFile == null)
       throw new BadParam("Either execute or file parameter required");
     
+    //add all function arguments to a map
+    Map<String, Object> userArgs = new HashMap<String, Object>();
+    userArgs.put("user.name", getDoAsUser());
+    userArgs.put("execute", execute);
+    userArgs.put("file", srcFile);
+    userArgs.put("arg", pigArgs);
+    userArgs.put("files", otherFiles);
+    userArgs.put("statusdir", statusdir);
+    userArgs.put("callback", callback);
+    userArgs.put("enablelog", Boolean.toString(enablelog));
+
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     PigDelegator d = new PigDelegator(appConf);
-    return d.run(getDoAsUser(),
+    return d.run(getDoAsUser(), userArgs,
       execute, srcFile,
       pigArgs, otherFiles,
       statusdir, callback, getCompletedUrl(), enablelog);
@@ -699,10 +739,21 @@ public class Server {
     if (execute == null && srcFile == null)
       throw new BadParam("Either execute or file parameter required");
     
+    //add all function arguments to a map
+    Map<String, Object> userArgs = new HashMap<String, Object>();
+    userArgs.put("user.name", getDoAsUser());
+    userArgs.put("execute", execute);
+    userArgs.put("file", srcFile);
+    userArgs.put("define", defines);
+    userArgs.put("files", otherFiles);
+    userArgs.put("statusdir", statusdir);
+    userArgs.put("callback", callback);
+    userArgs.put("enablelog", Boolean.toString(enablelog));
+
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     HiveDelegator d = new HiveDelegator(appConf);
-    return d.run(getDoAsUser(), execute, srcFile, defines, hiveArgs, otherFiles,
+    return d.run(getDoAsUser(), userArgs, execute, srcFile, defines, hiveArgs, otherFiles,
       statusdir, callback, getCompletedUrl(), enablelog);
   }
 

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java Tue Oct  1 04:48:44 2013
@@ -21,8 +21,10 @@ package org.apache.hive.hcatalog.templet
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
+import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
 
 /**
  * Submit a streaming job to the MapReduce queue.  Really just a front
@@ -35,10 +37,11 @@ public class StreamingDelegator extends 
     super(appConf);
   }
 
-  public EnqueueBean run(String user,
+  public EnqueueBean run(String user, Map<String, Object> userArgs,
                List<String> inputs, String output,
-               String mapper, String reducer,
-               List<String> files, List<String> defines,
+               String mapper, String reducer, String combiner,
+               List<String> fileList,
+               String files, List<String> defines,
                List<String> cmdenvs,
                List<String> jarArgs,
                String statusdir,
@@ -48,13 +51,13 @@ public class StreamingDelegator extends 
                JobType jobType)
     throws NotAuthorizedException, BadParam, BusyException, QueueException,
     ExecuteException, IOException, InterruptedException {
-    List<String> args = makeArgs(inputs, output, mapper, reducer,
-      files, defines, cmdenvs, jarArgs);
+    List<String> args = makeArgs(inputs, output, mapper, reducer, combiner,
+      fileList, cmdenvs, jarArgs);
 
     JarDelegator d = new JarDelegator(appConf);
-    return d.run(user,
+    return d.run(user, userArgs,
       appConf.streamingJar(), null,
-      null, null, args, defines,
+      null, files, args, defines,
       statusdir, callback, completedUrl, enableLog, jobType);
   }
 
@@ -62,10 +65,12 @@ public class StreamingDelegator extends 
                   String output,
                   String mapper,
                   String reducer,
-                  List<String> files,
-                  List<String> defines,
+                  String combiner,
+                  List<String> fileList,
                   List<String> cmdenvs,
-                  List<String> jarArgs) {
+                  List<String> jarArgs)
+    throws BadParam
+  {
     ArrayList<String> args = new ArrayList<String>();
     for (String input : inputs) {
       args.add("-input");
@@ -78,13 +83,24 @@ public class StreamingDelegator extends 
     args.add("-reducer");
     args.add(reducer);
 
-    for (String f : files)
-      args.add("-file" + f);
-    for (String d : defines)
-      args.add("-D" + d);
-    for (String e : cmdenvs)
-      args.add("-cmdenv" + e);
-    args.addAll(jarArgs);
+    if (TempletonUtils.isset(combiner)) {
+      args.add("-combiner");
+      args.add(combiner);
+    }
+
+    for (String f : fileList) {
+      args.add("-file");
+      args.add(f);
+    }
+
+    for (String e : cmdenvs) {
+      args.add("-cmdenv");
+      args.add(TempletonUtils.quoteForWindows(e));
+    }
+
+    for (String arg : jarArgs) {
+      args.add(TempletonUtils.quoteForWindows(arg));
+    }
 
     return args;
   }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java Tue Oct  1 04:48:44 2013
@@ -98,7 +98,7 @@ public class HDFSCleanup extends Thread 
         // cycle fails, it'll try again on the next cycle.
         try {
           if (fs == null) {
-            fs = FileSystem.get(appConf);
+            fs = new Path(storage_root).getFileSystem(appConf);
           }
           checkFiles(fs);
         } catch (Exception e) {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java Tue Oct  1 04:48:44 2013
@@ -210,7 +210,7 @@ public class HDFSStorage implements Temp
   public void openStorage(Configuration config) throws IOException {
     storage_root = config.get(TempletonStorage.STORAGE_ROOT);
     if (fs == null) {
-      fs = FileSystem.get(config);
+      fs = new Path(storage_root).getFileSystem(config);
     }
   }
 

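Both HDFSCleanup and HDFSStorage switch from FileSystem.get(conf), which always returns the filesystem named by the default-filesystem setting, to Path.getFileSystem(conf), which resolves the filesystem from the path's own scheme. That matters whenever the configured storage root lives somewhere other than the default filesystem. A minimal illustration of the difference (the URI is made up):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Illustration only; the storage-root URI is hypothetical.
    public class FsResolution {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Always the cluster default, whatever the default-FS setting names:
        FileSystem defaultFs = FileSystem.get(conf);
        // Resolved from the path's scheme, e.g. a local or non-default filesystem:
        FileSystem rootFs = new Path("file:///tmp/templeton").getFileSystem(conf);
        System.out.println(defaultFs.getUri() + " vs " + rootFs.getUri());
      }
    }
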
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java Tue Oct  1 04:48:44 2013
@@ -21,10 +21,12 @@ package org.apache.hive.hcatalog.templet
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hive.hcatalog.templeton.JsonBuilder;
 
 /**
  * The persistent state of a job.  The state is stored in one of the
@@ -232,6 +234,20 @@ public class JobState {
     setField("user", user);
   }
 
+  @SuppressWarnings("unchecked")
+  public Map<String, Object> getUserArgs()
+    throws IOException
+  {
+    String jsonString = getField("userArgs");
+    return (Map<String, Object>)JsonBuilder.jsonToMap(jsonString);
+  }
+  public void setUserArgs(Map<String, Object> userArgs)
+    throws IOException
+  {
+    String jsonString = JsonBuilder.mapToJson(userArgs);
+    setField("userArgs", jsonString);
+  }
+
   /**
    * The url callback
    */

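JobState now persists the request parameters as a single JSON field via JsonBuilder, so they survive storage and can be returned with the job status. A sketch of that round trip, assuming mapToJson/jsonToMap are symmetric as used above (the argument values are made up):

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hive.hcatalog.templeton.JsonBuilder;

    // Sketch of the round trip JobState performs; values are hypothetical.
    public class UserArgsRoundTrip {
      public static void main(String[] args) throws IOException {
        Map<String, Object> userArgs = new HashMap<String, Object>();
        userArgs.put("user.name", "someuser");
        userArgs.put("execute", "show tables;");
        String json = JsonBuilder.mapToJson(userArgs);   // what setUserArgs stores
        @SuppressWarnings("unchecked")
        Map<String, Object> restored =
            (Map<String, Object>) JsonBuilder.jsonToMap(json);  // what getUserArgs returns
        System.out.println(restored.get("execute"));
      }
    }
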
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java Tue Oct  1 04:48:44 2013
@@ -48,12 +48,15 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.hcatalog.templeton.BadParam;
 import org.apache.hive.hcatalog.templeton.LauncherDelegator;
 
 /**
@@ -104,6 +107,9 @@ public class TempletonControllerJob exte
 
       ArrayList<String> removeEnv = new ArrayList<String>();
       removeEnv.add("HADOOP_ROOT_LOGGER");
+      removeEnv.add("hadoop-command");
+      removeEnv.add("CLASS");
+      removeEnv.add("mapredcommand");
       Map<String, String> env = TempletonUtils.hadoopUserEnv(user,
         overrideClasspath);
       List<String> jarArgsList = new LinkedList<String>(Arrays.asList(jarArgs));
@@ -112,7 +118,15 @@ public class TempletonControllerJob exte
 
       if (tokenFile != null) {
         //Token is available, so replace the placeholder
+        tokenFile = tokenFile.replaceAll("\"", "");
         String tokenArg = "mapreduce.job.credentials.binary=" + tokenFile;
+        if (Shell.WINDOWS) {
+          try {
+            tokenArg = TempletonUtils.quoteForWindows(tokenArg);
+          } catch (BadParam e) {
+            throw new IOException("cannot pass " + tokenFile + " to mapreduce.job.credentials.binary", e);
+          }
+        }
         for(int i=0; i<jarArgsList.size(); i++){
           String newArg = 
             jarArgsList.get(i).replace(TOKEN_FILE_ARG_PLACEHOLDER, tokenArg);
@@ -211,9 +225,9 @@ public class TempletonControllerJob exte
       pool.execute(w);
     }
 
-    private KeepAlive startCounterKeepAlive(ExecutorService pool, Context cnt)
+    private KeepAlive startCounterKeepAlive(ExecutorService pool, Context context)
       throws IOException {
-      KeepAlive k = new KeepAlive(cnt);
+      KeepAlive k = new KeepAlive(context);
       pool.execute(k);
       return k;
     }
@@ -297,20 +311,25 @@ public class TempletonControllerJob exte
     }
   }
 
-  private static class KeepAlive implements Runnable {
-    private final Mapper.Context cnt;
-    private volatile boolean sendReport;
+  public static class KeepAlive implements Runnable {
+    private Context context;
+    public boolean sendReport;
 
-    public KeepAlive(Mapper.Context cnt) {
-      this.cnt = cnt;
+    public KeepAlive(Context context)
+    {
       this.sendReport = true;
+      this.context = context;
     }
 
     @Override
     public void run() {
       try {
         while (sendReport) {
-          cnt.progress();
+          // Periodically report progress on the Context object
+          // to prevent TaskTracker from killing the Templeton
+          // Controller task
+          context.progress();
+          System.err.println("KeepAlive Heart beat");
           Thread.sleep(KEEP_ALIVE_MSEC);
         }
       } catch (InterruptedException e) {

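The KeepAlive runnable exists because the controller task can wait a long time on the launched job; without periodic progress() calls the TaskTracker kills it once the task timeout expires. The pattern generalizes to any long-running worker with a liveness callback. In this sketch a plain Runnable stands in for Mapper.Context, the interval is a placeholder for KEEP_ALIVE_MSEC, and the stop flag stays volatile as in the original private version:

    // Generic keep-alive sketch; not tied to the Templeton classes.
    public class KeepAliveSketch implements Runnable {
      private final Runnable progress;   // stands in for context.progress()
      private final long intervalMs;
      private volatile boolean sendReport = true;

      public KeepAliveSketch(Runnable progress, long intervalMs) {
        this.progress = progress;
        this.intervalMs = intervalMs;
      }

      public void stop() { sendReport = false; }

      @Override public void run() {
        try {
          while (sendReport) {
            progress.run();              // report liveness to avoid task timeout
            Thread.sleep(intervalMs);
          }
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }
    }
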
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java Tue Oct  1 04:48:44 2013
@@ -26,6 +26,7 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLConnection;
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
@@ -39,8 +40,10 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hive.hcatalog.templeton.UgiFactory;
+import org.apache.hive.hcatalog.templeton.BadParam;
 
 /**
  * General utility methods.
@@ -296,4 +299,46 @@ public class TempletonUtils {
 
     return env;
   }
+
+  // Add double quotes around the given input parameter if it is not already
+  // quoted. Quotes are not allowed in the middle of the parameter; a
+  // BadParam exception is thrown if one is found.
+  //
+  // This method should be used to escape parameters before they get passed to
+  // Windows cmd scripts (specifically, special characters like a comma or an
+  // equal sign might be lost as part of the cmd script processing if not
+  // under quotes).
+  public static String quoteForWindows(String param) throws BadParam {
+    if (Shell.WINDOWS) {
+      if (param != null && param.length() > 0) {
+        String nonQuotedPart = param;
+        boolean addQuotes = true;
+        if (param.charAt(0) == '\"' && param.charAt(param.length() - 1) == '\"') {
+          if (param.length() < 2)
+            throw new BadParam("Passed in parameter is incorrectly quoted: " + param);
+
+          addQuotes = false;
+          nonQuotedPart = param.substring(1, param.length() - 1);
+        }
+
+        // If we have any quotes other than the outside quotes, throw
+        if (nonQuotedPart.contains("\"")) {
+          throw new BadParam("Passed in parameter is incorrectly quoted: " + param);
+        }
+
+        if (addQuotes) {
+          param = '\"' + param + '\"';
+        }
+      }
+    }
+    return param;
+  }
+
+  public static void addCmdForWindows(ArrayList<String> args) {
+    if(Shell.WINDOWS){
+      args.add("cmd");
+      args.add("/c");
+      args.add("call");
+    }
+  }
 }

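quoteForWindows is a pass-through off Windows; on Windows it wraps the parameter in double quotes unless it is already fully quoted, and rejects quotes anywhere else. The expected behavior, as a hedged demo with made-up inputs (the commented outputs assume a Windows host):

    import org.apache.hive.hcatalog.templeton.BadParam;
    import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;

    // Illustrative expectations on Windows; off Windows the method returns its
    // argument unchanged. Inputs are made up for the example.
    public class QuoteForWindowsDemo {
      public static void main(String[] args) throws BadParam {
        // Unquoted value gains surrounding quotes so cmd keeps '=' and ',' intact:
        System.out.println(TempletonUtils.quoteForWindows("key=a,b"));   // "key=a,b"
        // Already-quoted value passes through unchanged:
        System.out.println(TempletonUtils.quoteForWindows("\"key=a,b\""));
        try {
          TempletonUtils.quoteForWindows("bad\"middle");  // embedded quote
        } catch (BadParam expected) {
          System.out.println("rejected: " + expected.getMessage());
        }
      }
    }
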
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java Tue Oct  1 04:48:44 2013
@@ -22,12 +22,16 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 /**
  * Execute a local program.  This is a singleton service that will
  * execute a programs on the local box.
  */
 public class TrivialExecService {
   private static volatile TrivialExecService theSingleton;
+  private static final Log LOG = LogFactory.getLog(TrivialExecService.class);
 
   /**
    * Retrieve the singleton.
@@ -41,11 +45,7 @@ public class TrivialExecService {
   public Process run(List<String> cmd, List<String> removeEnv,
              Map<String, String> environmentVariables)
     throws IOException {
-    System.err.println("templeton: starting " + cmd);
-    System.err.print("With environment variables: ");
-    for (Map.Entry<String, String> keyVal : environmentVariables.entrySet()) {
-      System.err.println(keyVal.getKey() + "=" + keyVal.getValue());
-    }
+    logDebugCmd(cmd, environmentVariables);
     ProcessBuilder pb = new ProcessBuilder(cmd);
     for (String key : removeEnv)
       pb.environment().remove(key);
@@ -53,4 +53,20 @@ public class TrivialExecService {
     return pb.start();
   }
 
+  private void logDebugCmd(List<String> cmd,
+    Map<String, String> environmentVariables) {
+    if(!LOG.isDebugEnabled()){
+      return;
+    }
+    LOG.debug("starting " + cmd);
+    LOG.debug("With environment variables: " );
+    for(Map.Entry<String, String> keyVal : environmentVariables.entrySet()){
+      LOG.debug(keyVal.getKey() + "=" + keyVal.getValue());
+    }
+    LOG.debug("With environment variables already set: " );
+    Map<String, String> env = System.getenv();
+    for (String envName : env.keySet()) {
+      LOG.debug(envName + "=" + env.get(envName));
+    }
+  }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestServer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestServer.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestServer.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestServer.java Tue Oct  1 04:48:44 2013
@@ -31,7 +31,7 @@ public class TestServer extends TestCase
   MockServer server;
 
   public void setUp() {
-    new Main(null);         // Initialize the config
+    new Main(new String[]{});         // Initialize the config
     server = new MockServer();
   }
 

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Tue Oct  1 04:48:44 2013
@@ -159,6 +159,12 @@ public class TestTempletonUtils {
   @Test
   public void testHadoopFsFilename() {
     try {
+      String tmpFileName1 = "/tmp/testHadoopFsListAsArray1";
+      String tmpFileName2 = "/tmp/testHadoopFsListAsArray2";
+      File tmpFile1 = new File(tmpFileName1);
+      File tmpFile2 = new File(tmpFileName2);
+      tmpFile1.createNewFile();
+      tmpFile2.createNewFile();
       Assert.assertEquals(null, TempletonUtils.hadoopFsFilename(null, null, null));
       Assert.assertEquals(null,
         TempletonUtils.hadoopFsFilename(tmpFile.toURI().toString(), null, null));
@@ -188,14 +194,22 @@ public class TestTempletonUtils {
   @Test
   public void testHadoopFsListAsArray() {
     try {
+      String tmpFileName1 = "/tmp/testHadoopFsListAsArray1";
+      String tmpFileName2 = "/tmp/testHadoopFsListAsArray2";
+      File tmpFile1 = new File(tmpFileName1);
+      File tmpFile2 = new File(tmpFileName2);
+      tmpFile1.createNewFile();
+      tmpFile2.createNewFile();
       Assert.assertTrue(TempletonUtils.hadoopFsListAsArray(null, null, null) == null);
-      Assert.assertTrue(TempletonUtils.hadoopFsListAsArray(
-        tmpFile.toURI().toString() + "," + usrFile.toString(), null, null) == null);
-      String[] tmp2 = TempletonUtils.hadoopFsListAsArray(
-        tmpFile.toURI().toString() + "," + usrFile.toURI().toString(),
-        new Configuration(), null);
-      Assert.assertEquals(tmpFile.toURI().toString(), tmp2[0]);
-      Assert.assertEquals(usrFile.toURI().toString(), tmp2[1]);
+      Assert.assertTrue(TempletonUtils.hadoopFsListAsArray(tmpFileName1 + "," + tmpFileName2,
+        null, null) == null);
+      String[] tmp2
+        = TempletonUtils.hadoopFsListAsArray(tmpFileName1 + "," + tmpFileName2,
+                                             new Configuration(), null);
+      Assert.assertEquals("file:" + tmpFileName1, tmp2[0]);
+      Assert.assertEquals("file:" + tmpFileName2, tmp2[1]);
+      tmpFile1.delete();
+      tmpFile2.delete();
     } catch (FileNotFoundException e) {
       Assert.fail("Couldn't find name for " + tmpFile.toURI().toString());
     } catch (Exception e) {
@@ -218,15 +232,18 @@ public class TestTempletonUtils {
   @Test
   public void testHadoopFsListAsString() {
     try {
+      String tmpFileName1 = "/tmp/testHadoopFsListAsString1";
+      String tmpFileName2 = "/tmp/testHadoopFsListAsString2";
+      File tmpFile1 = new File(tmpFileName1);
+      File tmpFile2 = new File(tmpFileName2);
+      tmpFile1.createNewFile();
+      tmpFile2.createNewFile();
       Assert.assertTrue(TempletonUtils.hadoopFsListAsString(null, null, null) == null);
-      Assert.assertTrue(TempletonUtils.hadoopFsListAsString(
-        tmpFile.toURI().toString() + "," + usrFile.toURI().toString(),
+      Assert.assertTrue(TempletonUtils.hadoopFsListAsString("/tmp,/usr",
         null, null) == null);
-      Assert.assertEquals(
-        tmpFile.toURI().toString() + "," + usrFile.toURI().toString(),
-        TempletonUtils.hadoopFsListAsString(
-          tmpFile.toURI().toString() + "," + usrFile.toURI().toString(),
-          new Configuration(), null));
+      Assert.assertEquals("file:" + tmpFileName1 + ",file:" + tmpFileName2,
+        TempletonUtils.hadoopFsListAsString
+        (tmpFileName1 + "," + tmpFileName2, new Configuration(), null));
     } catch (FileNotFoundException e) {
       Assert.fail("Couldn't find name for " + tmpFile.toURI().toString());
     } catch (Exception e) {

Modified: hive/branches/tez/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/ivy/libraries.properties?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ivy/libraries.properties (original)
+++ hive/branches/tez/ivy/libraries.properties Tue Oct  1 04:48:44 2013
@@ -44,6 +44,8 @@ commons-lang3.version=3.1
 derby.version=10.4.2.0
 guava.version=11.0.2
 hbase.version=0.94.6.1
+httpclient.version=4.2.5
+httpcore.version=4.2.4
 jackson.version=1.8.8
 javaewah.version=0.3.2
 jdo-api.version=3.0.1

Modified: hive/branches/tez/jdbc/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/ivy.xml?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/jdbc/ivy.xml (original)
+++ hive/branches/tez/jdbc/ivy.xml Tue Oct  1 04:48:44 2013
@@ -29,5 +29,10 @@
   <dependencies>
     <dependency org="org.apache.hive" name="hive-cli" rev="${version}"
                 conf="compile->default" />
+    <dependency org="org.apache.httpcomponents" name="httpcore"
+                rev="${httpcore.version}"/>
+    <dependency org="org.apache.httpcomponents" name="httpclient"
+                rev="${httpclient.version}"/>
+
   </dependencies>
 </ivy-module>

Modified: hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Tue Oct  1 04:48:44 2013
@@ -44,20 +44,23 @@ import java.util.concurrent.Executor;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
 import org.apache.hive.service.auth.SaslQOP;
-import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
+import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.TCLIService;
 import org.apache.hive.service.cli.thrift.TCloseSessionReq;
 import org.apache.hive.service.cli.thrift.TOpenSessionReq;
 import org.apache.hive.service.cli.thrift.TOpenSessionResp;
 import org.apache.hive.service.cli.thrift.TProtocolVersion;
 import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.http.impl.client.DefaultHttpClient;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.THttpClient;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
@@ -75,30 +78,49 @@ public class HiveConnection implements j
   private static final String HIVE_AUTH_PASSWD = "password";
   private static final String HIVE_ANONYMOUS_USER = "anonymous";
   private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
-
+  private final String jdbcURI;
+  private final String host;
+  private final int port;
+  private final Map<String, String> sessConfMap;
+  private final Map<String, String> hiveConfMap;
+  private final Map<String, String> hiveVarMap;
+  private final boolean isEmbeddedMode;
   private TTransport transport;
   private TCLIService.Iface client;
   private boolean isClosed = true;
   private SQLWarning warningChain = null;
   private TSessionHandle sessHandle = null;
   private final List<TProtocolVersion> supportedProtocols = new LinkedList<TProtocolVersion>();
-  /**
-   * TODO: - parse uri (use java.net.URI?).
-   */
+
   public HiveConnection(String uri, Properties info) throws SQLException {
-    Utils.JdbcConnectionParams connParams = Utils.parseURL(uri);
-    if (connParams.isEmbeddedMode()) {
-      client = new EmbeddedThriftCLIService();
+    jdbcURI = uri;
+    // parse the connection uri
+    Utils.JdbcConnectionParams connParams = Utils.parseURL(jdbcURI);
+    // extract parsed connection parameters:
+    // JDBC URL: jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
+    // each list: <key1>=<val1>;<key2>=<val2> and so on
+    // sess_var_list -> sessConfMap
+    // hive_conf_list -> hiveConfMap
+    // hive_var_list -> hiveVarMap
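+    // (illustrative example, values made up:)
+    //   jdbc:hive2://host1:10001/default;user=foo;password=bar
+    //     ?hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice
+    //     #hivevar1=val1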
+    host = connParams.getHost();
+    port = connParams.getPort();
+    sessConfMap = connParams.getSessionVars();
+    hiveConfMap = connParams.getHiveConfs();
+    hiveVarMap = connParams.getHiveVars();
+    isEmbeddedMode = connParams.isEmbeddedMode();
+
+    if (isEmbeddedMode) {
+      client = new EmbeddedThriftBinaryCLIService();
     } else {
       // extract user/password from JDBC connection properties if its not supplied in the connection URL
       if (info.containsKey(HIVE_AUTH_USER)) {
-        connParams.getSessionVars().put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
+        sessConfMap.put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
         if (info.containsKey(HIVE_AUTH_PASSWD)) {
-            connParams.getSessionVars().put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
+          sessConfMap.put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
         }
       }
-
-      openTransport(uri, connParams.getHost(), connParams.getPort(), connParams.getSessionVars());
+      // open the client transport
+      openTransport();
     }
 
     // add supported protocols
@@ -107,48 +129,66 @@ public class HiveConnection implements j
     supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3);
 
     // open client session
-    openSession(uri);
+    openSession();
 
-    configureConnection(connParams);
+    configureConnection();
   }
 
-  private void configureConnection(Utils.JdbcConnectionParams connParams)
-      throws SQLException {
-    // set the hive variable in session state for local mode
-    if (connParams.isEmbeddedMode()) {
-      if (!connParams.getHiveVars().isEmpty()) {
-        SessionState.get().setHiveVariables(connParams.getHiveVars());
-      }
-    } else {
-      // for remote JDBC client, try to set the conf var using 'set foo=bar'
-      Statement stmt = createStatement();
-      for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
-        stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
-        stmt.close();
-      }
+  private void openTransport() throws SQLException {
+    transport = isHttpTransportMode() ?
+        createHttpTransport() :
+          createBinaryTransport();
+    TProtocol protocol = new TBinaryProtocol(transport);
+    client = new TCLIService.Client(protocol);
+    try {
+      transport.open();
+    } catch (TTransportException e) {
+      throw new SQLException("Could not open connection to "
+          + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+    }
+  }
 
-      // For remote JDBC client, try to set the hive var using 'set hivevar:key=value'
-      for (Entry<String, String> hiveVar : connParams.getHiveVars().entrySet()) {
-        stmt.execute("set hivevar:" + hiveVar.getKey() + "=" + hiveVar.getValue());
-        stmt.close();
-      }
+  private TTransport createHttpTransport() throws SQLException {
+    // http path should begin with "/"
+    String httpPath;
+    httpPath = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname);
+    if(httpPath == null) {
+      httpPath = "/";
+    }
+    if(!httpPath.startsWith("/")) {
+      httpPath = "/" + httpPath;
     }
+
+    DefaultHttpClient httpClient = new DefaultHttpClient();
+    String httpUrl = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname) +
+        "://" + host + ":" + port + httpPath;
+    httpClient.addRequestInterceptor(
+        new HttpBasicAuthInterceptor(getUserName(), getPasswd())
+        );
+    try {
+      transport = new THttpClient(httpUrl, httpClient);
+    }
+    catch (TTransportException e) {
+      String msg = "Could not create http connection to " +
+          jdbcURI + ". " + e.getMessage();
+      throw new SQLException(msg, "08S01", e);
+    }
+    return transport;
   }
 
-  private void openTransport(String uri, String host, int port, Map<String, String> sessConf )
-      throws SQLException {
+  private TTransport createBinaryTransport() throws SQLException {
     transport = new TSocket(host, port);
-
     // handle secure connection if specified
-    if (!sessConf.containsKey(HIVE_AUTH_TYPE)
-        || !sessConf.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)){
+    if (!sessConfMap.containsKey(HIVE_AUTH_TYPE)
+        || !sessConfMap.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) {
       try {
-        if (sessConf.containsKey(HIVE_AUTH_PRINCIPAL)) {
+        // If Kerberos authentication is requested (a principal is present)
+        if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) {
           Map<String, String> saslProps = new HashMap<String, String>();
           SaslQOP saslQOP = SaslQOP.AUTH;
-          if(sessConf.containsKey(HIVE_AUTH_QOP)) {
+          if (sessConfMap.containsKey(HIVE_AUTH_QOP)) {
             try {
-              saslQOP = SaslQOP.fromString(sessConf.get(HIVE_AUTH_QOP));
+              saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP));
             } catch (IllegalArgumentException e) {
               throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(), "42000", e);
             }
@@ -156,35 +196,38 @@ public class HiveConnection implements j
           saslProps.put(Sasl.QOP, saslQOP.toString());
           saslProps.put(Sasl.SERVER_AUTH, "true");
           transport = KerberosSaslHelper.getKerberosTransport(
-                  sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
+              sessConfMap.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
         } else {
-          String userName = sessConf.get(HIVE_AUTH_USER);
+          String userName = sessConfMap.get(HIVE_AUTH_USER);
           if ((userName == null) || userName.isEmpty()) {
             userName = HIVE_ANONYMOUS_USER;
           }
-          String passwd = sessConf.get(HIVE_AUTH_PASSWD);
+          String passwd = sessConfMap.get(HIVE_AUTH_PASSWD);
           if ((passwd == null) || passwd.isEmpty()) {
             passwd = HIVE_ANONYMOUS_PASSWD;
           }
           transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
         }
       } catch (SaslException e) {
-        throw new SQLException("Could not establish secure connection to "
-                  + uri + ": " + e.getMessage(), " 08S01", e);
+        throw new SQLException("Could not create secure connection to "
+            + jdbcURI + ": " + e.getMessage(), "08S01", e);
       }
     }
+    return transport;
+  }
 
-    TProtocol protocol = new TBinaryProtocol(transport);
-    client = new TCLIService.Client(protocol);
-    try {
-      transport.open();
-    } catch (TTransportException e) {
-      throw new SQLException("Could not establish connection to "
-          + uri + ": " + e.getMessage(), " 08S01", e);
+
+  private boolean isHttpTransportMode() {
+    String transportMode =
+        hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
+    if (transportMode != null && (transportMode.equalsIgnoreCase("http") ||
+        transportMode.equalsIgnoreCase("https"))) {
+      return true;
     }
+    return false;
   }
 
-  private void openSession(String uri) throws SQLException {
+  private void openSession() throws SQLException {
     TOpenSessionReq openReq = new TOpenSessionReq();
 
     // set the session configuration
@@ -201,11 +244,61 @@ public class HiveConnection implements j
       sessHandle = openResp.getSessionHandle();
     } catch (TException e) {
       throw new SQLException("Could not establish connection to "
-          + uri + ": " + e.getMessage(), " 08S01", e);
+          + jdbcURI + ": " + e.getMessage(), "08S01", e);
     }
     isClosed = false;
   }
 
+  private void configureConnection() throws SQLException {
+    // set the hive variable in session state for local mode
+    if (isEmbeddedMode) {
+      if (!hiveVarMap.isEmpty()) {
+        SessionState.get().setHiveVariables(hiveVarMap);
+      }
+    } else {
+      // for remote JDBC client, try to set the conf var using 'set foo=bar'
+      Statement stmt = createStatement();
+      for (Entry<String, String> hiveConf : hiveConfMap.entrySet()) {
+        stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
+      }
+
+      // For remote JDBC client, try to set the hive var using 'set hivevar:key=value'
+      for (Entry<String, String> hiveVar : hiveVarMap.entrySet()) {
+        stmt.execute("set hivevar:" + hiveVar.getKey() + "=" + hiveVar.getValue());
+      }
+      stmt.close();
+    }
+  }
+
+  /**
+   * @return username from sessConfMap
+   */
+  private String getUserName() {
+    return getSessionValue(HIVE_AUTH_USER, HIVE_ANONYMOUS_USER);
+  }
+
+  /**
+   * @return password from sessConfMap
+   */
+  private String getPasswd() {
+    return getSessionValue(HIVE_AUTH_PASSWD, HIVE_ANONYMOUS_PASSWD);
+  }
+
+  /**
+   * Look up varName in sessConfMap; if it's null or empty, return the
+   * default value varDefault.
+   * @param varName name of the session variable to look up
+   * @param varDefault default returned when the value is null or empty
+   * @return the session value, or varDefault if unset or empty
+   */
+  private String getSessionValue(String varName, String varDefault) {
+    String varValue = sessConfMap.get(varName);
+    if ((varValue == null) || varValue.isEmpty()) {
+      varValue = varDefault;
+    }
+    return varValue;
+  }
+
   public void abort(Executor executor) throws SQLException {
     // JDK 1.7
     throw new SQLException("Method not supported");

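For reference, a minimal client-side sketch of how the new HTTP transport mode
is exercised through the public JDBC API. This is not part of the patch; the
host, port, credentials and http path are hypothetical placeholders.

    // HttpModeExample.java -- hypothetical usage sketch, not part of this patch.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HttpModeExample {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // hive.server2.transport.mode=http routes openTransport() to
        // createHttpTransport(); hive.server2.thrift.http.path supplies the
        // request path (a leading "/" is prepended when missing), and the
        // user/password session vars feed the HttpBasicAuthInterceptor.
        String url = "jdbc:hive2://server:10001/default;user=foo;password=bar?"
            + "hive.server2.transport.mode=http;"
            + "hive.server2.thrift.http.path=hs2";
        Connection conn = DriverManager.getConnection(url);
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("show tables");
        while (rs.next()) {
          System.out.println(rs.getString(1));
        }
        rs.close();
        stmt.close();
        conn.close();
      }
    }
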
Modified: hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java Tue Oct  1 04:48:44 2013
@@ -30,6 +30,10 @@ import java.util.jar.Attributes;
 import java.util.jar.Manifest;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
+
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
+
+
 /**
  * HiveDriver.
  *
@@ -50,16 +54,6 @@ public class HiveDriver implements Drive
   private static final boolean JDBC_COMPLIANT = false;
 
   /**
-   * The required prefix for the connection URL.
-   */
-  private static final String URL_PREFIX = "jdbc:hive2://";
-
-  /**
-   * If host is provided, without a port.
-   */
-  private static final String DEFAULT_PORT = "10000";
-
-  /**
    * Property key for the database name.
    */
   private static final String DBNAME_PROPERTY_KEY = "DBNAME";
@@ -99,7 +93,7 @@ public class HiveDriver implements Drive
    */
 
   public boolean acceptsURL(String url) throws SQLException {
-    return Pattern.matches(URL_PREFIX + ".*", url);
+    return Pattern.matches(Utils.URL_PREFIX + ".*", url);
   }
 
   /*
@@ -183,8 +177,8 @@ public class HiveDriver implements Drive
       info = new Properties();
     }
 
-    if ((url != null) && url.startsWith(URL_PREFIX)) {
-      info = parseURL(url, info);
+    if ((url != null) && url.startsWith(Utils.URL_PREFIX)) {
+      info = parseURLforPropertyInfo(url, info);
     }
 
     DriverPropertyInfo hostProp = new DriverPropertyInfo(HOST_PROPERTY_KEY,
@@ -214,7 +208,6 @@ public class HiveDriver implements Drive
   /**
    * Returns whether the driver is JDBC compliant.
    */
-
   public boolean jdbcCompliant() {
     return JDBC_COMPLIANT;
   }
@@ -223,44 +216,36 @@ public class HiveDriver implements Drive
   * Takes a url in the form of jdbc:hive2://[hostname]:[port]/[db_name] and
   * parses it. Everything after jdbc:hive2:// is optional.
   *
+   * The output from Utils.parseURL() is massaged for the needs of getPropertyInfo.
    * @param url
    * @param defaults
    * @return
    * @throws java.sql.SQLException
    */
-  private Properties parseURL(String url, Properties defaults) throws SQLException {
+  private Properties parseURLforPropertyInfo(String url, Properties defaults) throws SQLException {
     Properties urlProps = (defaults != null) ? new Properties(defaults)
         : new Properties();
 
-    if (url == null || !url.startsWith(URL_PREFIX)) {
+    if (url == null || !url.startsWith(Utils.URL_PREFIX)) {
       throw new SQLException("Invalid connection url: " + url);
     }
 
-    if (url.length() <= URL_PREFIX.length()) {
-      return urlProps;
-    }
-
-    // [hostname]:[port]/[db_name]
-    String connectionInfo = url.substring(URL_PREFIX.length());
-
-    // [hostname]:[port] [db_name]
-    String[] hostPortAndDatabase = connectionInfo.split("/", 2);
-
-    // [hostname]:[port]
-    if (hostPortAndDatabase[0].length() > 0) {
-      String[] hostAndPort = hostPortAndDatabase[0].split(":", 2);
-      urlProps.put(HOST_PROPERTY_KEY, hostAndPort[0]);
-      if (hostAndPort.length > 1) {
-        urlProps.put(PORT_PROPERTY_KEY, hostAndPort[1]);
-      } else {
-        urlProps.put(PORT_PROPERTY_KEY, DEFAULT_PORT);
-      }
-    }
-
-    // [db_name]
-    if (hostPortAndDatabase.length > 1) {
-      urlProps.put(DBNAME_PROPERTY_KEY, hostPortAndDatabase[1]);
-    }
+    JdbcConnectionParams params = Utils.parseURL(url);
+    String host = params.getHost();
+    if (host == null) {
+      host = "";
+    }
+    String port = Integer.toString(params.getPort());
+    if (host.equals("")) {
+      port = "";
+    }
+    else if (port.equals("0")) {
+      port = Utils.DEFAULT_PORT;
+    }
+    String db = params.getDbName();
+    urlProps.put(HOST_PROPERTY_KEY, host);
+    urlProps.put(PORT_PROPERTY_KEY, port);
+    urlProps.put(DBNAME_PROPERTY_KEY, db);
 
     return urlProps;
   }

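A short sketch of what the reworked parseURLforPropertyInfo surfaces through
the standard DriverPropertyInfo API (assumed usage, not part of the patch; the
URL is hypothetical). Per the parsing above, with no host both HOST and PORT
come back empty, and a host without an explicit port maps to Utils.DEFAULT_PORT.

    // PropertyInfoExample.java -- hypothetical usage sketch.
    import java.sql.DriverPropertyInfo;
    import org.apache.hive.jdbc.HiveDriver;

    public class PropertyInfoExample {
      public static void main(String[] args) throws Exception {
        HiveDriver driver = new HiveDriver();
        DriverPropertyInfo[] props =
            driver.getPropertyInfo("jdbc:hive2://example-host:10001/mydb", null);
        for (DriverPropertyInfo p : props) {
          // Expected: HOST = example-host, PORT = 10001, DBNAME = mydb
          System.out.println(p.name + " = " + p.value);
        }
      }
    }
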
Modified: hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/Utils.java Tue Oct  1 04:48:44 2013
@@ -169,13 +169,16 @@ public class Utils {
 
   /**
    * Parse JDBC connection URL
-   * The new format of the URL is jdbc:hive://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
+   * The new format of the URL is jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
    * where the optional sess, conf and var lists are semicolon separated <key>=<val> pairs. As before, if the
   * host/port is not specified, the driver runs an embedded hive.
    * examples -
-   *  jdbc:hive://ubuntu:11000/db2?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
-   *  jdbc:hive://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
-   *  jdbc:hive://ubuntu:11000/db2;user=foo;password=bar
+   *  jdbc:hive2://ubuntu:11000/db2?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
+   *  jdbc:hive2://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
+   *  jdbc:hive2://ubuntu:11000/db2;user=foo;password=bar
+   *
+   *  Connect to http://server:10001/hs2, with specified basicAuth credentials and initial database:
+   *     jdbc:hive2://server:10001/db;user=foo;password=bar?hive.server2.transport.mode=http;hive.server2.thrift.http.path=hs2
    *
    * Note that currently the session properties are not used.
    *
@@ -189,7 +192,8 @@ public class Utils {
       throw new IllegalArgumentException("Bad URL format");
     }
 
-    // Don't parse URL with no other configuration.
+    // For URLs with no other configuration,
+    // don't parse them; just set embedded mode to true
     if (uri.equalsIgnoreCase(URL_PREFIX)) {
       connParams.setEmbeddedMode(true);
       return connParams;
@@ -197,11 +201,11 @@ public class Utils {
 
     URI jdbcURI = URI.create(uri.substring(URI_JDBC_PREFIX.length()));
 
-    //Check to prevent unintentional use of embedded mode. A missing "/" can
+    // Check to prevent unintentional use of embedded mode. A missing "/"
     // to separate the 'path' portion of URI can result in this.
-    //The missing "/" common typo while using secure mode, eg of such url -
+    // The missing "/" is a common typo when using secure mode, e.g. in a url like -
     // jdbc:hive2://localhost:10000;principal=hive/HiveServer2Host@YOUR-REALM.COM
-    if((jdbcURI.getAuthority() != null) && (jdbcURI.getHost()==null)){
+    if((jdbcURI.getAuthority() != null) && (jdbcURI.getHost()==null)) {
        throw new IllegalArgumentException("Bad URL format. Hostname not found "
            + " in authority part of the url: " + jdbcURI.getAuthority()
            + ". Are you missing a '/' after the hostname ?");
@@ -264,6 +268,4 @@ public class Utils {
 
     return connParams;
   }
-
-
 }

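A small sketch of the embedded-mode rule documented above (assumed behavior,
not part of the patch; placed in the org.apache.hive.jdbc package since
parseURL is accessed package-locally, as in the test below):

    package org.apache.hive.jdbc;

    // ParseUrlSketch.java -- hypothetical sketch.
    public class ParseUrlSketch {
      public static void main(String[] args) throws Exception {
        // A bare prefix carries no host, so the driver runs an embedded hive.
        Utils.JdbcConnectionParams p = Utils.parseURL("jdbc:hive2://");
        System.out.println(p.isEmbeddedMode());  // true
        // host=ubuntu, port=11000, db=db2; user/password land in the session vars.
        p = Utils.parseURL("jdbc:hive2://ubuntu:11000/db2;user=foo;password=bar");
        System.out.println(p.getHost() + ":" + p.getPort() + "/" + p.getDbName());
      }
    }
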
Modified: hive/branches/tez/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/tez/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java Tue Oct  1 04:48:44 2013
@@ -45,6 +45,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hive.common.util.HiveVersionInfo;
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping.ClassicTableTypes;
 import org.apache.hive.service.cli.operation.HiveTableTypeMapping;
@@ -1356,14 +1357,21 @@ public class TestJdbcDriver2 extends Tes
 
   // [url] [host] [port] [db]
   private static final String[][] URL_PROPERTIES = new String[][] {
+    // binary mode
     {"jdbc:hive2://", "", "", "default"},
     {"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
     {"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
-    {"jdbc:hive2://foo:1243", "foo", "1243", "default"}};
+    {"jdbc:hive2://foo:1243", "foo", "1243", "default"},
+
+    // http mode
+    {"jdbc:hive2://server:10002/db;user=foo;password=bar?" +
+        "hive.server2.transport.mode=http;" +
+        "hive.server2.thrift.http.path=hs2",
+        "server", "10002", "db"},
+  };
 
   public void testDriverProperties() throws SQLException {
     HiveDriver driver = new HiveDriver();
-
     for (String[] testValues : URL_PROPERTIES) {
       DriverPropertyInfo[] dpi = driver.getPropertyInfo(testValues[0], null);
       assertEquals("unexpected DriverPropertyInfo array size", 3, dpi.length);
@@ -1371,7 +1379,29 @@ public class TestJdbcDriver2 extends Tes
       assertDpi(dpi[1], "PORT", testValues[2]);
       assertDpi(dpi[2], "DBNAME", testValues[3]);
     }
+  }
 
+  private static final String[][] HTTP_URL_PROPERTIES = new String[][] {
+    {"jdbc:hive2://server:10002/db;" +
+        "user=foo;password=bar?" +
+        "hive.server2.transport.mode=http;" +
+        "hive.server2.thrift.http.path=hs2", "server", "10002", "db", "http", "hs2"},
+    {"jdbc:hive2://server:10000/testdb;" +
+        "user=foo;password=bar?" +
+        "hive.server2.transport.mode=binary;" +
+        "hive.server2.thrift.http.path=", "server", "10000", "testdb", "binary", ""},
+  };
+
+  public void testParseUrlHttpMode() throws SQLException {
+    HiveDriver driver = new HiveDriver();
+    for (String[] testValues : HTTP_URL_PROPERTIES) {
+      JdbcConnectionParams params = Utils.parseURL(testValues[0]);
+      assertEquals(testValues[1], params.getHost());
+      assertEquals(Integer.parseInt(testValues[2]), params.getPort());
+      assertEquals(testValues[3], params.getDbName());
+      assertEquals(testValues[4], params.getHiveConfs().get("hive.server2.transport.mode"));
+      assertEquals(testValues[5], params.getHiveConfs().get("hive.server2.thrift.http.path"));
+    }
   }
 
   private static void assertDpi(DriverPropertyInfo dpi, String name,

Modified: hive/branches/tez/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql (original)
+++ hive/branches/tez/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql Tue Oct  1 04:48:44 2013
@@ -4,9 +4,8 @@
 CREATE TABLE "VERSION" (
   "VER_ID" bigint,
   "SCHEMA_VERSION" character varying(127) NOT NULL,
-  "VERSION_COMMENT" character varying(255) NOT NULL,
-  PRIMARY KEY ("VER_ID")
+  "VERSION_COMMENT" character varying(255) NOT NULL
 );
 ALTER TABLE ONLY "VERSION" ADD CONSTRAINT "VERSION_pkey" PRIMARY KEY ("VER_ID");
 
-INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '', 'Initial value');
+INSERT INTO "VERSION" ("VER_ID", "SCHEMA_VERSION", "VERSION_COMMENT") VALUES (1, '', 'Initial value');

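The quoting change above matters because PostgreSQL folds unquoted identifiers
to lower case: a table created as "VERSION" can only be referenced with the
quotes preserved, so the unquoted INSERT INTO VERSION would have looked for a
table named version and failed. A hypothetical JDBC check against a metastore
database (connection details are placeholders):

    // VersionCheck.java -- hypothetical sketch, not part of this patch.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class VersionCheck {
      public static void main(String[] args) throws Exception {
        Connection c = DriverManager.getConnection(
            "jdbc:postgresql://localhost:5432/metastore", "hive", "hive");
        Statement s = c.createStatement();
        // Unquoted FROM VERSION would resolve to the (nonexistent) table
        // "version"; the quoted form matches the DDL above.
        ResultSet rs = s.executeQuery(
            "SELECT \"SCHEMA_VERSION\" FROM \"VERSION\" WHERE \"VER_ID\" = 1");
        if (rs.next()) {
          System.out.println("Metastore schema version: " + rs.getString(1));
        }
        rs.close();
        s.close();
        c.close();
      }
    }
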
Modified: hive/branches/tez/metastore/scripts/upgrade/postgres/hive-schema-0.12.0.postgres.sql
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/scripts/upgrade/postgres/hive-schema-0.12.0.postgres.sql?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/metastore/scripts/upgrade/postgres/hive-schema-0.12.0.postgres.sql (original)
+++ hive/branches/tez/metastore/scripts/upgrade/postgres/hive-schema-0.12.0.postgres.sql Tue Oct  1 04:48:44 2013
@@ -522,7 +522,6 @@ CREATE TABLE "VERSION" (
   "VER_ID" bigint,
   "SCHEMA_VERSION" character varying(127) NOT NULL,
-  "VERSION_COMMENT" character varying(255) NOT NULL,
-  PRIMARY KEY ("VER_ID")
+  "VERSION_COMMENT" character varying(255) NOT NULL
 );
 
 --
@@ -1400,7 +1399,7 @@ REVOKE ALL ON SCHEMA public FROM PUBLIC;
 GRANT ALL ON SCHEMA public TO PUBLIC;
 
 
-INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '0.12.0', 'Hive release version 0.12.0');
+INSERT INTO "VERSION" ("VER_ID", "SCHEMA_VERSION", "VERSION_COMMENT") VALUES (1, '0.12.0', 'Hive release version 0.12.0');
 --
 -- PostgreSQL database dump complete
 --

Modified: hive/branches/tez/metastore/scripts/upgrade/postgres/hive-schema-0.13.0.postgres.sql
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/scripts/upgrade/postgres/hive-schema-0.13.0.postgres.sql?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/metastore/scripts/upgrade/postgres/hive-schema-0.13.0.postgres.sql (original)
+++ hive/branches/tez/metastore/scripts/upgrade/postgres/hive-schema-0.13.0.postgres.sql Tue Oct  1 04:48:44 2013
@@ -521,8 +521,7 @@ CREATE TABLE "TAB_COL_STATS" (
 CREATE TABLE "VERSION" (
   "VER_ID" bigint,
   "SCHEMA_VERSION" character varying(127) NOT NULL,
-  "COMMENT" character varying(255) NOT NULL,
-  PRIMARY KEY ("VER_ID")
+  "VERSION_COMMENT" character varying(255) NOT NULL
 );
 
 --
@@ -1400,7 +1399,7 @@ REVOKE ALL ON SCHEMA public FROM PUBLIC;
 GRANT ALL ON SCHEMA public TO PUBLIC;
 
 
-INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '0.13.0', 'Hive release version 0.13.0');
+INSERT INTO "VERSION" ("VER_ID", "SCHEMA_VERSION", "VERSION_COMMENT") VALUES (1, '0.13.0', 'Hive release version 0.13.0');
 --
 -- PostgreSQL database dump complete
 --

Modified: hive/branches/tez/metastore/scripts/upgrade/postgres/upgrade-0.11.0-to-0.12.0.postgres.sql
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/scripts/upgrade/postgres/upgrade-0.11.0-to-0.12.0.postgres.sql?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/metastore/scripts/upgrade/postgres/upgrade-0.11.0-to-0.12.0.postgres.sql (original)
+++ hive/branches/tez/metastore/scripts/upgrade/postgres/upgrade-0.11.0-to-0.12.0.postgres.sql Tue Oct  1 04:48:44 2013
@@ -1,5 +1,5 @@
 SELECT 'Upgrading MetaStore schema from 0.11.0 to 0.12.0';
 \i 013-HIVE-3255.postgres.sql;
 \i 014-HIVE-3764.postgres.sql;
-UPDATE VERSION SET SCHEMA_VERSION='0.12.0', VERSION_COMMENT='Hive release version 0.12.0' where VER_ID=1;
+UPDATE "VERSION" SET "SCHEMA_VERSION"='0.12.0', "VERSION_COMMENT"='Hive release version 0.12.0' where "VER_ID"=1;
 SELECT 'Finished upgrading MetaStore schema from 0.11.0 to 0.12.0';

Modified: hive/branches/tez/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql (original)
+++ hive/branches/tez/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql Tue Oct  1 04:48:44 2013
@@ -1,3 +1,3 @@
-SELECT 'Upgrading MetaStore schema from 0.11.0 to 0.12.0';
-UPDATE VERSION SET SCHEMA_VERSION='0.13.0', VERSION_COMMENT='Hive release version 0.13.0' where VER_ID=1;
-SELECT 'Finished upgrading MetaStore schema from 0.11.0 to 0.12.0';
+SELECT 'Upgrading MetaStore schema from 0.12.0 to 0.13.0';
+UPDATE "VERSION" SET "SCHEMA_VERSION"='0.13.0', "VERSION_COMMENT"='Hive release version 0.13.0' where "VER_ID"=1;
+SELECT 'Finished upgrading MetaStore schema from 0.12.0 to 0.13.0';