Posted to commits@hive.apache.org by br...@apache.org on 2013/09/27 19:41:55 UTC

svn commit: r1526996 [3/29] - in /hive/branches/maven: ./ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/src/test/ bin/ bin/ext/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/test/results/clientposit...

Modified: hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java (original)
+++ hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java Fri Sep 27 17:41:42 2013
@@ -59,20 +59,20 @@ import org.apache.hadoop.mapred.RunningJ
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatException;
-import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.hbase.snapshot.RevisionManager;
 import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
 import org.apache.hcatalog.hbase.snapshot.Transaction;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.PartInfo;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.PartInfo;
 import org.junit.Test;
 
 public class TestHCatHBaseInputFormat extends SkeletonHBaseTest {
@@ -229,7 +229,7 @@ public class TestHCatHBaseInputFormat ex
     // Note: These asserts only work in the case of LocalJobRunner, as they run in the same JVM.
     // If using MiniMRCluster, the tests will have to be modified.
     assertFalse(MapReadHTable.error);
-    assertEquals(MapReadHTable.count, 1);
+    assertEquals(1, MapReadHTable.count);
 
     String dropTableQuery = "DROP TABLE " + hbaseTableName;
     CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
@@ -291,7 +291,7 @@ public class TestHCatHBaseInputFormat ex
     job.setNumReduceTasks(0);
     assertTrue(job.waitForCompletion(true));
     assertFalse(MapReadProjHTable.error);
-    assertEquals(MapReadProjHTable.count, 1);
+    assertEquals(1, MapReadProjHTable.count);
 
     String dropTableQuery = "DROP TABLE " + tableName;
     CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
@@ -325,7 +325,7 @@ public class TestHCatHBaseInputFormat ex
         HCatUtil.serialize(getHiveConf().getAllProperties()));
 
     // output settings
-    Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableProjectionReadMR");
+    Path outputDir = new Path(getTestDir(), "mapred/testHBaseInputFormatProjectionReadMR");
     FileSystem fs = getFileSystem();
     if (fs.exists(outputDir)) {
       fs.delete(outputDir, true);
@@ -361,8 +361,8 @@ public class TestHCatHBaseInputFormat ex
     RunningJob runJob = JobClient.runJob(job);
     runJob.waitForCompletion();
     assertTrue(runJob.isSuccessful());
-    assertFalse(MapReadProjHTable.error);
-    assertEquals(MapReadProjHTable.count, 1);
+    assertFalse(MapReadProjectionHTable.error);
+    assertEquals(1, MapReadProjectionHTable.count);
 
     String dropTableQuery = "DROP TABLE " + tableName;
     CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);

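The assertEquals changes above adopt JUnit's (expected, actual) argument order, so a failure message reports the values the right way round. A minimal illustration (hypothetical test, not part of this commit):

    import static org.junit.Assert.assertEquals;
    import org.junit.Test;

    public class AssertOrderExample {
      @Test
      public void reportsExpectedValueFirst() {
        int count = 2; // pretend the mapper was invoked twice
        // JUnit's contract is assertEquals(expected, actual); this fails with
        // "expected:<1> but was:<2>". With the arguments swapped it would
        // misleadingly report "expected:<2> but was:<1>".
        assertEquals(1, count);
      }
    }
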
Modified: hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java (original)
+++ hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java Fri Sep 27 17:41:42 2013
@@ -34,13 +34,13 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatUtil;
 import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
 import org.junit.Test;
 
 public class TestSnapshots extends SkeletonHBaseTest {

Modified: hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java (original)
+++ hive/branches/maven/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java Fri Sep 27 17:41:42 2013
@@ -33,8 +33,8 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hive.hcatalog.cli.HCatDriver;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hcatalog.hbase.SkeletonHBaseTest;
 import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.data.Stat;

Modified: hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java (original)
+++ hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java Fri Sep 27 17:41:42 2013
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -40,7 +41,7 @@ public class HiveDelegator extends Launc
     super(appConf);
   }
 
-  public EnqueueBean run(String user,
+  public EnqueueBean run(String user, Map<String, Object> userArgs,
                String execute, String srcFile, List<String> defines,
                List<String> hiveArgs, String otherFiles,
                String statusdir, String callback, String completedUrl, boolean enablelog)
@@ -51,7 +52,7 @@ public class HiveDelegator extends Launc
     List<String> args = makeArgs(execute, srcFile, defines, hiveArgs, otherFiles, statusdir,
                    completedUrl, enablelog);
 
-    return enqueueController(user, callback, args);
+    return enqueueController(user, userArgs, callback, args);
   }
 
   private List<String> makeArgs(String execute, String srcFile,

Modified: hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java (original)
+++ hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java Fri Sep 27 17:41:42 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -38,7 +39,7 @@ public class JarDelegator extends Launch
     super(appConf);
   }
 
-  public EnqueueBean run(String user, String jar, String mainClass,
+  public EnqueueBean run(String user, Map<String, Object> userArgs, String jar, String mainClass,
                String libjars, String files,
                List<String> jarArgs, List<String> defines,
                String statusdir, String callback, String completedUrl,
@@ -50,7 +51,7 @@ public class JarDelegator extends Launch
       libjars, files, jarArgs, defines,
       statusdir, completedUrl, enablelog, jobType);
 
-    return enqueueController(user, callback, args);
+    return enqueueController(user, userArgs, callback, args);
   }
 
   private List<String> makeArgs(String jar, String mainClass,

Modified: hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java (original)
+++ hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java Fri Sep 27 17:41:42 2013
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 import org.apache.commons.logging.Log;
@@ -49,13 +50,15 @@ public class LauncherDelegator extends T
     super(appConf);
   }
 
-  public void registerJob(String id, String user, String callback)
+  public void registerJob(String id, String user, String callback,
+      Map<String, Object> userArgs)
     throws IOException {
     JobState state = null;
     try {
       state = new JobState(id, Main.getAppConfigInstance());
       state.setUser(user);
       state.setCallback(callback);
+      state.setUserArgs(userArgs);
     } finally {
       if (state != null)
         state.close();
@@ -65,7 +68,7 @@ public class LauncherDelegator extends T
   /**
    * Enqueue the TempletonControllerJob directly calling doAs.
    */
-  public EnqueueBean enqueueController(String user, String callback,
+  public EnqueueBean enqueueController(String user, Map<String, Object> userArgs, String callback,
                      List<String> args)
     throws NotAuthorizedException, BusyException, ExecuteException,
     IOException, QueueException {
@@ -82,7 +85,7 @@ public class LauncherDelegator extends T
       if (id == null)
         throw new QueueException("Unable to get job id");
 
-      registerJob(id, user, callback);
+      registerJob(id, user, callback, userArgs);
 
       return new EnqueueBean(id);
     } catch (InterruptedException e) {

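Taken together, the delegator changes in this commit thread the caller's request parameters from each run() method through enqueueController() into the persisted job state. A simplified sketch of the call chain under the new signatures (method names per the diffs; everything else elided):

    // Sketch of the new flow, not the actual implementation:
    //
    //   Server REST handler (see Server.java below)  // builds userArgs from REST params
    //     -> SomeDelegator.run(user, userArgs, ...)  // e.g. HiveDelegator, JarDelegator
    //       -> enqueueController(user, userArgs, callback, args)
    //         -> registerJob(id, user, callback, userArgs)
    //           -> JobState.setUserArgs(userArgs)    // persisted as JSON, see JobState below
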
Modified: hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java (original)
+++ hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java Fri Sep 27 17:41:42 2013
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -39,7 +40,7 @@ public class PigDelegator extends Launch
     super(appConf);
   }
 
-  public EnqueueBean run(String user,
+  public EnqueueBean run(String user, Map<String, Object> userArgs,
                String execute, String srcFile,
                List<String> pigArgs, String otherFiles,
                String statusdir, String callback, String completedUrl, boolean enablelog)
@@ -50,7 +51,7 @@ public class PigDelegator extends Launch
       srcFile, pigArgs,
       otherFiles, statusdir, completedUrl, enablelog);
 
-    return enqueueController(user, callback, args);
+    return enqueueController(user, userArgs, callback, args);
   }
 
   private List<String> makeArgs(String execute, String srcFile,

Modified: hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java (original)
+++ hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java Fri Sep 27 17:41:42 2013
@@ -19,6 +19,7 @@
 package org.apache.hive.hcatalog.templeton;
 
 import java.io.IOException;
+import java.util.Map;
 
 import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.JobProfile;
@@ -38,6 +39,7 @@ public class QueueStatusBean {
   public String user;
   public String callback;
   public String completed;
+  public Map<String, Object> userargs;
 
   public QueueStatusBean() {
   }
@@ -63,5 +65,6 @@ public class QueueStatusBean {
     user = state.getUser();
     callback = state.getCallback();
     completed = state.getCompleteStatus();
+    userargs = state.getUserArgs();
   }
 }

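With userargs on the bean, a job-status response can echo back the request that created the job. Assuming the usual JAX-RS JSON serialization of the bean's public fields, the relevant part of the payload would look roughly like this (values are placeholders):

    {
      "user": "alice",
      "callback": null,
      "completed": "done",
      "userargs": {"user.name": "alice", "enablelog": "false"}
    }
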
Modified: hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Fri Sep 27 17:41:42 2013
@@ -601,10 +601,23 @@ public class Server {
     verifyParam(mapper, "mapper");
     verifyParam(reducer, "reducer");
     
+    Map<String, Object> userArgs = new HashMap<String, Object>();
+    userArgs.put("user.name", getDoAsUser());
+    userArgs.put("input", inputs);
+    userArgs.put("output", output);
+    userArgs.put("mapper", mapper);
+    userArgs.put("reducer", reducer);
+    userArgs.put("files",  files);
+    userArgs.put("define",  defines);
+    userArgs.put("cmdenv",  cmdenvs);
+    userArgs.put("arg",  args);
+    userArgs.put("statusdir", statusdir);
+    userArgs.put("callback", callback);
+    userArgs.put("enablelog", Boolean.toString(enablelog));
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     StreamingDelegator d = new StreamingDelegator(appConf);
-    return d.run(getDoAsUser(), inputs, output, mapper, reducer,
+    return d.run(getDoAsUser(), userArgs, inputs, output, mapper, reducer,
       files, defines, cmdenvs, args,
       statusdir, callback, getCompletedUrl(), enablelog, JobType.STREAMING);
   }
@@ -630,10 +643,22 @@ public class Server {
     verifyParam(jar, "jar");
     verifyParam(mainClass, "class");
     
+    Map<String, Object> userArgs = new HashMap<String, Object>();
+    userArgs.put("user.name", getDoAsUser());
+    userArgs.put("jar", jar);
+    userArgs.put("class", mainClass);
+    userArgs.put("libjars", libjars);
+    userArgs.put("files", files);
+    userArgs.put("arg", args);
+    userArgs.put("define", defines);
+    userArgs.put("statusdir", statusdir);
+    userArgs.put("callback", callback);
+    userArgs.put("enablelog", Boolean.toString(enablelog));
+
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     JarDelegator d = new JarDelegator(appConf);
-    return d.run(getDoAsUser(),
+    return d.run(getDoAsUser(), userArgs,
       jar, mainClass,
       libjars, files, args, defines,
       statusdir, callback, getCompletedUrl(), enablelog, JobType.JAR);
@@ -658,10 +683,21 @@ public class Server {
     if (execute == null && srcFile == null)
       throw new BadParam("Either execute or file parameter required");
     
+    //add all function arguments to a map
+    Map<String, Object> userArgs = new HashMap<String, Object>();
+    userArgs.put("user.name", getDoAsUser());
+    userArgs.put("execute", execute);
+    userArgs.put("file", srcFile);
+    userArgs.put("arg", pigArgs);
+    userArgs.put("files", otherFiles);
+    userArgs.put("statusdir", statusdir);
+    userArgs.put("callback", callback);
+    userArgs.put("enablelog", Boolean.toString(enablelog));
+
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     PigDelegator d = new PigDelegator(appConf);
-    return d.run(getDoAsUser(),
+    return d.run(getDoAsUser(), userArgs,
       execute, srcFile,
       pigArgs, otherFiles,
       statusdir, callback, getCompletedUrl(), enablelog);
@@ -699,10 +735,21 @@ public class Server {
     if (execute == null && srcFile == null)
       throw new BadParam("Either execute or file parameter required");
     
+    //add all function arguments to a map
+    Map<String, Object> userArgs = new HashMap<String, Object>();
+    userArgs.put("user.name", getDoAsUser());
+    userArgs.put("execute", execute);
+    userArgs.put("file", srcFile);
+    userArgs.put("define", defines);
+    userArgs.put("files", otherFiles);
+    userArgs.put("statusdir", statusdir);
+    userArgs.put("callback", callback);
+    userArgs.put("enablelog", Boolean.toString(enablelog));
+
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     HiveDelegator d = new HiveDelegator(appConf);
-    return d.run(getDoAsUser(), execute, srcFile, defines, hiveArgs, otherFiles,
+    return d.run(getDoAsUser(), userArgs, execute, srcFile, defines, hiveArgs, otherFiles,
       statusdir, callback, getCompletedUrl(), enablelog);
   }
 

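Each endpoint above snapshots its REST parameters into a userArgs map right after parameter verification, so the original request can later be reported alongside job status. The commit inlines the map-building per endpoint; a hypothetical helper that would factor out the repetition (not part of this commit) might look like:

    // Hypothetical refactoring sketch only; keysAndValues alternates key, value.
    private Map<String, Object> buildUserArgs(String doAsUser, Object... keysAndValues) {
      Map<String, Object> userArgs = new HashMap<String, Object>();
      userArgs.put("user.name", doAsUser);
      for (int i = 0; i + 1 < keysAndValues.length; i += 2) {
        userArgs.put((String) keysAndValues[i], keysAndValues[i + 1]);
      }
      return userArgs;
    }

Inlining keeps each endpoint explicit about its own parameter names, at the cost of some repetition.
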
Modified: hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java (original)
+++ hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java Fri Sep 27 17:41:42 2013
@@ -21,6 +21,7 @@ package org.apache.hive.hcatalog.templet
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
 
@@ -35,7 +36,7 @@ public class StreamingDelegator extends 
     super(appConf);
   }
 
-  public EnqueueBean run(String user,
+  public EnqueueBean run(String user, Map<String, Object> userArgs,
                List<String> inputs, String output,
                String mapper, String reducer,
                List<String> files, List<String> defines,
@@ -52,7 +53,7 @@ public class StreamingDelegator extends 
       files, defines, cmdenvs, jarArgs);
 
     JarDelegator d = new JarDelegator(appConf);
-    return d.run(user,
+    return d.run(user, userArgs,
       appConf.streamingJar(), null,
       null, null, args, defines,
       statusdir, callback, completedUrl, enableLog, jobType);

Modified: hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java (original)
+++ hive/branches/maven/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java Fri Sep 27 17:41:42 2013
@@ -21,10 +21,12 @@ package org.apache.hive.hcatalog.templet
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hive.hcatalog.templeton.JsonBuilder;
 
 /**
  * The persistent state of a job.  The state is stored in one of the
@@ -232,6 +234,20 @@ public class JobState {
     setField("user", user);
   }
 
+  @SuppressWarnings("unchecked")
+  public Map<String, Object> getUserArgs()
+    throws IOException
+  {
+    String jsonString = getField("userArgs");
+    return (Map<String, Object>)JsonBuilder.jsonToMap(jsonString);
+  }
+  public void setUserArgs(Map<String, Object> userArgs)
+    throws IOException
+  {
+    String jsonString = JsonBuilder.mapToJson(userArgs);
+    setField("userArgs", jsonString);
+  }
+
   /**
    * The url callback
    */

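JobState persists the map as a single JSON-encoded field, so the underlying storage needs no schema change. A minimal round-trip sketch, assuming JsonBuilder.mapToJson() and jsonToMap() behave as their names and the usage above suggest:

    Map<String, Object> in = new HashMap<String, Object>();
    in.put("user.name", "alice");
    in.put("enablelog", "false");

    String json = JsonBuilder.mapToJson(in);   // e.g. {"user.name":"alice","enablelog":"false"}
    @SuppressWarnings("unchecked")
    Map<String, Object> out = (Map<String, Object>) JsonBuilder.jsonToMap(json);
    // out should equal in, modulo JSON's type coercions; both calls may throw IOException.
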
Modified: hive/branches/maven/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hive/branches/maven/ivy/libraries.properties?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ivy/libraries.properties (original)
+++ hive/branches/maven/ivy/libraries.properties Fri Sep 27 17:41:42 2013
@@ -43,6 +43,8 @@ commons-pool.version=1.5.4
 derby.version=10.4.2.0
 guava.version=11.0.2
 hbase.version=0.94.6.1
+httpclient.version=4.2.5
+httpcore.version=4.2.4
 jackson.version=1.8.8
 javaewah.version=0.3.2
 jdo-api.version=3.0.1

Modified: hive/branches/maven/jdbc/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/maven/jdbc/ivy.xml?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/jdbc/ivy.xml (original)
+++ hive/branches/maven/jdbc/ivy.xml Fri Sep 27 17:41:42 2013
@@ -29,5 +29,10 @@
   <dependencies>
     <dependency org="org.apache.hive" name="hive-cli" rev="${version}"
                 conf="compile->default" />
+    <dependency org="org.apache.httpcomponents" name="httpcore"
+                rev="${httpcore.version}"/>
+    <dependency org="org.apache.httpcomponents" name="httpclient"
+                rev="${httpclient.version}"/>
+
   </dependencies>
 </ivy-module>

Modified: hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Fri Sep 27 17:41:42 2013
@@ -44,20 +44,23 @@ import java.util.concurrent.Executor;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
 import org.apache.hive.service.auth.SaslQOP;
-import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
+import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.TCLIService;
 import org.apache.hive.service.cli.thrift.TCloseSessionReq;
 import org.apache.hive.service.cli.thrift.TOpenSessionReq;
 import org.apache.hive.service.cli.thrift.TOpenSessionResp;
 import org.apache.hive.service.cli.thrift.TProtocolVersion;
 import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.http.impl.client.DefaultHttpClient;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.THttpClient;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
@@ -75,30 +78,49 @@ public class HiveConnection implements j
   private static final String HIVE_AUTH_PASSWD = "password";
   private static final String HIVE_ANONYMOUS_USER = "anonymous";
   private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
-
+  private final String jdbcURI;
+  private final String host;
+  private final int port;
+  private final Map<String, String> sessConfMap;
+  private final Map<String, String> hiveConfMap;
+  private final Map<String, String> hiveVarMap;
+  private final boolean isEmbeddedMode;
   private TTransport transport;
   private TCLIService.Iface client;
   private boolean isClosed = true;
   private SQLWarning warningChain = null;
   private TSessionHandle sessHandle = null;
   private final List<TProtocolVersion> supportedProtocols = new LinkedList<TProtocolVersion>();
-  /**
-   * TODO: - parse uri (use java.net.URI?).
-   */
+
   public HiveConnection(String uri, Properties info) throws SQLException {
-    Utils.JdbcConnectionParams connParams = Utils.parseURL(uri);
-    if (connParams.isEmbeddedMode()) {
-      client = new EmbeddedThriftCLIService();
+    jdbcURI = uri;
+    // parse the connection uri
+    Utils.JdbcConnectionParams connParams = Utils.parseURL(jdbcURI);
+    // extract parsed connection parameters:
+    // JDBC URL: jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
+    // each list: <key1>=<val1>;<key2>=<val2> and so on
+    // sess_var_list -> sessConfMap
+    // hive_conf_list -> hiveConfMap
+    // hive_var_list -> hiveVarMap
+    host = connParams.getHost();
+    port = connParams.getPort();
+    sessConfMap = connParams.getSessionVars();
+    hiveConfMap = connParams.getHiveConfs();
+    hiveVarMap = connParams.getHiveVars();
+    isEmbeddedMode = connParams.isEmbeddedMode();
+
+    if (isEmbeddedMode) {
+      client = new EmbeddedThriftBinaryCLIService();
     } else {
       // extract user/password from JDBC connection properties if not supplied in the connection URL
       if (info.containsKey(HIVE_AUTH_USER)) {
-        connParams.getSessionVars().put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
+        sessConfMap.put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
         if (info.containsKey(HIVE_AUTH_PASSWD)) {
-            connParams.getSessionVars().put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
+          sessConfMap.put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
         }
       }
-
-      openTransport(uri, connParams.getHost(), connParams.getPort(), connParams.getSessionVars());
+      // open the client transport
+      openTransport();
     }
 
     // add supported protocols
@@ -107,48 +129,66 @@ public class HiveConnection implements j
     supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3);
 
     // open client session
-    openSession(uri);
+    openSession();
 
-    configureConnection(connParams);
+    configureConnection();
   }
 
-  private void configureConnection(Utils.JdbcConnectionParams connParams)
-      throws SQLException {
-    // set the hive variable in session state for local mode
-    if (connParams.isEmbeddedMode()) {
-      if (!connParams.getHiveVars().isEmpty()) {
-        SessionState.get().setHiveVariables(connParams.getHiveVars());
-      }
-    } else {
-      // for remote JDBC client, try to set the conf var using 'set foo=bar'
-      Statement stmt = createStatement();
-      for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
-        stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
-        stmt.close();
-      }
+  private void openTransport() throws SQLException {
+    transport = isHttpTransportMode() ?
+        createHttpTransport() :
+          createBinaryTransport();
+    TProtocol protocol = new TBinaryProtocol(transport);
+    client = new TCLIService.Client(protocol);
+    try {
+      transport.open();
+    } catch (TTransportException e) {
+      throw new SQLException("Could not open connection to "
+          + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+    }
+  }
 
-      // For remote JDBC client, try to set the hive var using 'set hivevar:key=value'
-      for (Entry<String, String> hiveVar : connParams.getHiveVars().entrySet()) {
-        stmt.execute("set hivevar:" + hiveVar.getKey() + "=" + hiveVar.getValue());
-        stmt.close();
-      }
+  private TTransport createHttpTransport() throws SQLException {
+    // http path should begin with "/"
+    String httpPath;
+    httpPath = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname);
+    if(httpPath == null) {
+      httpPath = "/";
+    }
+    if(!httpPath.startsWith("/")) {
+      httpPath = "/" + httpPath;
     }
+
+    DefaultHttpClient httpClient = new DefaultHttpClient();
+    String httpUrl = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname) +
+        "://" + host + ":" + port + httpPath;
+    httpClient.addRequestInterceptor(
+        new HttpBasicAuthInterceptor(getUserName(), getPasswd())
+        );
+    try {
+      transport = new THttpClient(httpUrl, httpClient);
+    }
+    catch (TTransportException e) {
+      String msg =  "Could not create http connection to " +
+          jdbcURI + ". " + e.getMessage();
+      throw new SQLException(msg, " 08S01", e);
+    }
+    return transport;
   }
 
-  private void openTransport(String uri, String host, int port, Map<String, String> sessConf )
-      throws SQLException {
+  private TTransport createBinaryTransport() throws SQLException {
     transport = new TSocket(host, port);
-
     // handle secure connection if specified
-    if (!sessConf.containsKey(HIVE_AUTH_TYPE)
-        || !sessConf.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)){
+    if (!sessConfMap.containsKey(HIVE_AUTH_TYPE)
+        || !sessConfMap.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) {
       try {
-        if (sessConf.containsKey(HIVE_AUTH_PRINCIPAL)) {
+        // If Kerberos
+        if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) {
           Map<String, String> saslProps = new HashMap<String, String>();
           SaslQOP saslQOP = SaslQOP.AUTH;
-          if(sessConf.containsKey(HIVE_AUTH_QOP)) {
+          if(sessConfMap.containsKey(HIVE_AUTH_QOP)) {
             try {
-              saslQOP = SaslQOP.fromString(sessConf.get(HIVE_AUTH_QOP));
+              saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP));
             } catch (IllegalArgumentException e) {
               throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(), "42000", e);
             }
@@ -156,35 +196,38 @@ public class HiveConnection implements j
           saslProps.put(Sasl.QOP, saslQOP.toString());
           saslProps.put(Sasl.SERVER_AUTH, "true");
           transport = KerberosSaslHelper.getKerberosTransport(
-                  sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
+              sessConfMap.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
         } else {
-          String userName = sessConf.get(HIVE_AUTH_USER);
+          String userName = sessConfMap.get(HIVE_AUTH_USER);
           if ((userName == null) || userName.isEmpty()) {
             userName = HIVE_ANONYMOUS_USER;
           }
-          String passwd = sessConf.get(HIVE_AUTH_PASSWD);
+          String passwd = sessConfMap.get(HIVE_AUTH_PASSWD);
           if ((passwd == null) || passwd.isEmpty()) {
             passwd = HIVE_ANONYMOUS_PASSWD;
           }
           transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
         }
       } catch (SaslException e) {
-        throw new SQLException("Could not establish secure connection to "
-                  + uri + ": " + e.getMessage(), " 08S01", e);
+        throw new SQLException("Could not create secure connection to "
+            + jdbcURI + ": " + e.getMessage(), " 08S01", e);
       }
     }
+    return transport;
+  }
 
-    TProtocol protocol = new TBinaryProtocol(transport);
-    client = new TCLIService.Client(protocol);
-    try {
-      transport.open();
-    } catch (TTransportException e) {
-      throw new SQLException("Could not establish connection to "
-          + uri + ": " + e.getMessage(), " 08S01", e);
+
+  private boolean isHttpTransportMode() {
+    String transportMode =
+        hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
+    if(transportMode != null && (transportMode.equalsIgnoreCase("http") ||
+        transportMode.equalsIgnoreCase("https"))) {
+      return true;
     }
+    return false;
   }
 
-  private void openSession(String uri) throws SQLException {
+  private void openSession() throws SQLException {
     TOpenSessionReq openReq = new TOpenSessionReq();
 
     // set the session configuration
@@ -201,11 +244,61 @@ public class HiveConnection implements j
       sessHandle = openResp.getSessionHandle();
     } catch (TException e) {
       throw new SQLException("Could not establish connection to "
-          + uri + ": " + e.getMessage(), " 08S01", e);
+          + jdbcURI + ": " + e.getMessage(), " 08S01", e);
     }
     isClosed = false;
   }
 
+  private void configureConnection() throws SQLException {
+    // set the hive variable in session state for local mode
+    if (isEmbeddedMode) {
+      if (!hiveVarMap.isEmpty()) {
+        SessionState.get().setHiveVariables(hiveVarMap);
+      }
+    } else {
+      // for remote JDBC client, try to set the conf var using 'set foo=bar'
+      Statement stmt = createStatement();
+      for (Entry<String, String> hiveConf : hiveConfMap.entrySet()) {
+        stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
+      }
+
+      // For remote JDBC client, try to set the hive var using 'set hivevar:key=value'
+      for (Entry<String, String> hiveVar : hiveVarMap.entrySet()) {
+        stmt.execute("set hivevar:" + hiveVar.getKey() + "=" + hiveVar.getValue());
+      }
+      stmt.close();
+    }
+  }
+
+  /**
+   * @return username from sessConfMap
+   */
+  private String getUserName() {
+    return getSessionValue(HIVE_AUTH_USER, HIVE_ANONYMOUS_USER);
+  }
+
+  /**
+   * @return password from sessConfMap
+   */
+  private String getPasswd() {
+    return getSessionValue(HIVE_AUTH_PASSWD, HIVE_ANONYMOUS_PASSWD);
+  }
+
+  /**
+   * Look up varName in sessConfMap; if it's null or empty, return the default
+   * value varDefault
+   * @param varName
+   * @param varDefault
+   * @return
+   */
+  private String getSessionValue(String varName, String varDefault) {
+    String varValue = sessConfMap.get(varName);
+    if ((varValue == null) || varValue.isEmpty()) {
+      varValue = varDefault;
+    }
+    return varValue;
+  }
+
   public void abort(Executor executor) throws SQLException {
     // JDK 1.7
     throw new SQLException("Method not supported");

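With this refactoring the transport is selected entirely from the connection URL: hive.server2.transport.mode=http (or https) routes through createHttpTransport(), anything else through createBinaryTransport(). A usage sketch (host, port, credentials, and the hs2 path are placeholders; exception handling omitted):

    import java.sql.Connection;
    import java.sql.DriverManager;

    // hive.server2.thrift.http.path supplies the servlet path; it defaults
    // to "/" and a leading "/" is prepended if missing (see createHttpTransport).
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    Connection conn = DriverManager.getConnection(
        "jdbc:hive2://server:10001/default;user=foo;password=bar"
        + "?hive.server2.transport.mode=http"
        + ";hive.server2.thrift.http.path=hs2");
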
Modified: hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java (original)
+++ hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java Fri Sep 27 17:41:42 2013
@@ -30,6 +30,10 @@ import java.util.jar.Attributes;
 import java.util.jar.Manifest;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
+
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
+
+
 /**
  * HiveDriver.
  *
@@ -50,16 +54,6 @@ public class HiveDriver implements Drive
   private static final boolean JDBC_COMPLIANT = false;
 
   /**
-   * The required prefix for the connection URL.
-   */
-  private static final String URL_PREFIX = "jdbc:hive2://";
-
-  /**
-   * If host is provided, without a port.
-   */
-  private static final String DEFAULT_PORT = "10000";
-
-  /**
    * Property key for the database name.
    */
   private static final String DBNAME_PROPERTY_KEY = "DBNAME";
@@ -99,7 +93,7 @@ public class HiveDriver implements Drive
    */
 
   public boolean acceptsURL(String url) throws SQLException {
-    return Pattern.matches(URL_PREFIX + ".*", url);
+    return Pattern.matches(Utils.URL_PREFIX + ".*", url);
   }
 
   /*
@@ -183,8 +177,8 @@ public class HiveDriver implements Drive
       info = new Properties();
     }
 
-    if ((url != null) && url.startsWith(URL_PREFIX)) {
-      info = parseURL(url, info);
+    if ((url != null) && url.startsWith(Utils.URL_PREFIX)) {
+      info = parseURLforPropertyInfo(url, info);
     }
 
     DriverPropertyInfo hostProp = new DriverPropertyInfo(HOST_PROPERTY_KEY,
@@ -214,7 +208,6 @@ public class HiveDriver implements Drive
   /**
    * Returns whether the driver is JDBC compliant.
    */
-
   public boolean jdbcCompliant() {
     return JDBC_COMPLIANT;
   }
@@ -223,44 +216,36 @@ public class HiveDriver implements Drive
   * Takes a url in the form of jdbc:hive2://[hostname]:[port]/[db_name] and
    * parses it. Everything after jdbc:hive// is optional.
    *
+   * The output from Utils.parseURL() is massaged for the needs of getPropertyInfo
    * @param url
    * @param defaults
    * @return
    * @throws java.sql.SQLException
    */
-  private Properties parseURL(String url, Properties defaults) throws SQLException {
+  private Properties parseURLforPropertyInfo(String url, Properties defaults) throws SQLException {
     Properties urlProps = (defaults != null) ? new Properties(defaults)
         : new Properties();
 
-    if (url == null || !url.startsWith(URL_PREFIX)) {
+    if (url == null || !url.startsWith(Utils.URL_PREFIX)) {
       throw new SQLException("Invalid connection url: " + url);
     }
 
-    if (url.length() <= URL_PREFIX.length()) {
-      return urlProps;
-    }
-
-    // [hostname]:[port]/[db_name]
-    String connectionInfo = url.substring(URL_PREFIX.length());
-
-    // [hostname]:[port] [db_name]
-    String[] hostPortAndDatabase = connectionInfo.split("/", 2);
-
-    // [hostname]:[port]
-    if (hostPortAndDatabase[0].length() > 0) {
-      String[] hostAndPort = hostPortAndDatabase[0].split(":", 2);
-      urlProps.put(HOST_PROPERTY_KEY, hostAndPort[0]);
-      if (hostAndPort.length > 1) {
-        urlProps.put(PORT_PROPERTY_KEY, hostAndPort[1]);
-      } else {
-        urlProps.put(PORT_PROPERTY_KEY, DEFAULT_PORT);
-      }
-    }
-
-    // [db_name]
-    if (hostPortAndDatabase.length > 1) {
-      urlProps.put(DBNAME_PROPERTY_KEY, hostPortAndDatabase[1]);
-    }
+    JdbcConnectionParams params = Utils.parseURL(url);
+    String host = params.getHost();
+    if (host == null){
+      host = "";
+    }
+    String port = Integer.toString(params.getPort());
+    if(host.equals("")){
+      port = "";
+    }
+    else if(port.equals("0")){
+      port = Utils.DEFAULT_PORT;
+    }
+    String db = params.getDbName();
+    urlProps.put(HOST_PROPERTY_KEY, host);
+    urlProps.put(PORT_PROPERTY_KEY, port);
+    urlProps.put(DBNAME_PROPERTY_KEY, db);
 
     return urlProps;
   }

Modified: hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/branches/maven/jdbc/src/java/org/apache/hive/jdbc/Utils.java Fri Sep 27 17:41:42 2013
@@ -169,13 +169,16 @@ public class Utils {
 
   /**
    * Parse JDBC connection URL
-   * The new format of the URL is jdbc:hive://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
+   * The new format of the URL is jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
    * where the optional sess, conf and var lists are semicolon separated <key>=<val> pairs. As before, if the
   * host/port is not specified, the driver runs an embedded hive.
    * examples -
-   *  jdbc:hive://ubuntu:11000/db2?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
-   *  jdbc:hive://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
-   *  jdbc:hive://ubuntu:11000/db2;user=foo;password=bar
+   *  jdbc:hive2://ubuntu:11000/db2?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
+   *  jdbc:hive2://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
+   *  jdbc:hive2://ubuntu:11000/db2;user=foo;password=bar
+   *
+   *  Connect to http://server:10001/hs2, with specified basicAuth credentials and initial database:
+   *     jdbc:hive2://server:10001/db;user=foo;password=bar?hive.server2.transport.mode=http;hive.server2.thrift.http.path=hs2
    *
    * Note that currently the session properties are not used.
    *
@@ -189,7 +192,8 @@ public class Utils {
       throw new IllegalArgumentException("Bad URL format");
     }
 
-    // Don't parse URL with no other configuration.
+    // For URLs with no other configuration
+    // Don't parse them, but set embedded mode to true
     if (uri.equalsIgnoreCase(URL_PREFIX)) {
       connParams.setEmbeddedMode(true);
       return connParams;
@@ -197,11 +201,11 @@ public class Utils {
 
     URI jdbcURI = URI.create(uri.substring(URI_JDBC_PREFIX.length()));
 
-    //Check to prevent unintentional use of embedded mode. A missing "/" can
+    // Check to prevent unintentional use of embedded mode. A missing "/"
     // to separate the 'path' portion of URI can result in this.
-    //The missing "/" common typo while using secure mode, eg of such url -
+    // The missing "/" is a common typo when using secure mode, e.g. in a url such as -
     // jdbc:hive2://localhost:10000;principal=hive/HiveServer2Host@YOUR-REALM.COM
-    if((jdbcURI.getAuthority() != null) && (jdbcURI.getHost()==null)){
+    if((jdbcURI.getAuthority() != null) && (jdbcURI.getHost()==null)) {
        throw new IllegalArgumentException("Bad URL format. Hostname not found "
            + " in authority part of the url: " + jdbcURI.getAuthority()
            + ". Are you missing a '/' after the hostname ?");
@@ -264,6 +268,4 @@ public class Utils {
 
     return connParams;
   }
-
-
 }

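For reference, parsing one of the documented URLs yields the following (accessor names per the code above; a sketch, and parseURL may throw for malformed URLs):

    JdbcConnectionParams p = Utils.parseURL(
        "jdbc:hive2://ubuntu:11000/db2;user=foo;password=bar"
        + "?hive.cli.conf.printheader=true#stab=salesTable");
    // p.getHost()        -> "ubuntu"
    // p.getPort()        -> 11000
    // p.getDbName()      -> "db2"
    // p.getSessionVars() -> {user=foo, password=bar}
    // p.getHiveConfs()   -> {hive.cli.conf.printheader=true}
    // p.getHiveVars()    -> {stab=salesTable}
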
Modified: hive/branches/maven/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/maven/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java Fri Sep 27 17:41:42 2013
@@ -45,6 +45,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hive.common.util.HiveVersionInfo;
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping.ClassicTableTypes;
 import org.apache.hive.service.cli.operation.HiveTableTypeMapping;
@@ -1356,14 +1357,21 @@ public class TestJdbcDriver2 extends Tes
 
   // [url] [host] [port] [db]
   private static final String[][] URL_PROPERTIES = new String[][] {
+    // binary mode
     {"jdbc:hive2://", "", "", "default"},
     {"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
     {"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
-    {"jdbc:hive2://foo:1243", "foo", "1243", "default"}};
+    {"jdbc:hive2://foo:1243", "foo", "1243", "default"},
+
+    // http mode
+    {"jdbc:hive2://server:10002/db;user=foo;password=bar?" +
+        "hive.server2.transport.mode=http;" +
+        "hive.server2.thrift.http.path=hs2",
+        "server", "10002", "db"},
+  };
 
   public void testDriverProperties() throws SQLException {
     HiveDriver driver = new HiveDriver();
-
     for (String[] testValues : URL_PROPERTIES) {
       DriverPropertyInfo[] dpi = driver.getPropertyInfo(testValues[0], null);
       assertEquals("unexpected DriverPropertyInfo array size", 3, dpi.length);
@@ -1371,7 +1379,29 @@ public class TestJdbcDriver2 extends Tes
       assertDpi(dpi[1], "PORT", testValues[2]);
       assertDpi(dpi[2], "DBNAME", testValues[3]);
     }
+  }
 
+  private static final String[][] HTTP_URL_PROPERTIES = new String[][] {
+    {"jdbc:hive2://server:10002/db;" +
+        "user=foo;password=bar?" +
+        "hive.server2.transport.mode=http;" +
+        "hive.server2.thrift.http.path=hs2", "server", "10002", "db", "http", "hs2"},
+    {"jdbc:hive2://server:10000/testdb;" +
+        "user=foo;password=bar?" +
+        "hive.server2.transport.mode=binary;" +
+        "hive.server2.thrift.http.path=", "server", "10000", "testdb", "binary", ""},
+  };
+
+  public void testParseUrlHttpMode() throws SQLException {
+    HiveDriver driver = new HiveDriver();
+    for (String[] testValues : HTTP_URL_PROPERTIES) {
+      JdbcConnectionParams params = Utils.parseURL(testValues[0]);
+      assertEquals(params.getHost(), testValues[1]);
+      assertEquals(params.getPort(), Integer.parseInt(testValues[2]));
+      assertEquals(params.getDbName(), testValues[3]);
+      assertEquals(params.getHiveConfs().get("hive.server2.transport.mode"), testValues[4]);
+      assertEquals(params.getHiveConfs().get("hive.server2.thrift.http.path"), testValues[5]);
+    }
   }
 
   private static void assertDpi(DriverPropertyInfo dpi, String name,

Modified: hive/branches/maven/metastore/if/hive_metastore.thrift
URL: http://svn.apache.org/viewvc/hive/branches/maven/metastore/if/hive_metastore.thrift?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/metastore/if/hive_metastore.thrift (original)
+++ hive/branches/maven/metastore/if/hive_metastore.thrift Fri Sep 27 17:41:42 2013
@@ -271,6 +271,21 @@ struct EnvironmentContext {
   1: map<string, string> properties
 }
 
+// Return type for get_partitions_by_expr
+struct PartitionsByExprResult {
+  1: required set<Partition> partitions,
+  // Whether the results has any (currently, all) partitions which may or may not match
+  2: required bool hasUnknownPartitions
+}
+
+struct PartitionsByExprRequest {
+  1: required string dbName,
+  2: required string tblName,
+  3: required binary expr,
+  4: optional string defaultPartitionName,
+  5: optional i16 maxParts=-1
+}
+
 exception MetaException {
   1: string message
 }
@@ -492,6 +507,12 @@ service ThriftHiveMetastore extends fb30
     3:string filter, 4:i16 max_parts=-1)
                        throws(1:MetaException o1, 2:NoSuchObjectException o2)
 
+  // get the partitions matching the given partition filter
+  // unlike get_partitions_by_filter, takes a serialized Hive expression, and with that can work
+  // with any filter (get_partitions_by_filter only works if the filter can be pushed down to JDOQL).
+  PartitionsByExprResult get_partitions_by_expr(1:PartitionsByExprRequest req)
+                       throws(1:MetaException o1, 2:NoSuchObjectException o2)
+
   // get partitions give a list of partition names
   list<Partition> get_partitions_by_names(1:string db_name 2:string tbl_name 3:list<string> names)
                        throws(1:MetaException o1, 2:NoSuchObjectException o2)

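The new request/result pair lets a client push an arbitrary serialized Hive filter expression, rather than only filters that can be pushed down to JDOQL. A hedged sketch against the generated Java Thrift stubs (serializedExpr is an opaque byte[] payload produced by the Hive client; client is assumed to be a connected ThriftHiveMetastore.Client):

    PartitionsByExprRequest req = new PartitionsByExprRequest();
    req.setDbName("default");
    req.setTblName("sales");
    req.setExpr(serializedExpr);   // opaque serialized expression bytes
    req.setMaxParts((short) -1);   // optional; -1 means no limit

    PartitionsByExprResult res = client.get_partitions_by_expr(req);
    if (res.isHasUnknownPartitions()) {
      // Some returned partitions may not actually match; the caller
      // must apply the filter client-side to be exact.
    }
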
Modified: hive/branches/maven/metastore/scripts/upgrade/derby/014-HIVE-3764.derby.sql
URL: http://svn.apache.org/viewvc/hive/branches/maven/metastore/scripts/upgrade/derby/014-HIVE-3764.derby.sql?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/metastore/scripts/upgrade/derby/014-HIVE-3764.derby.sql (original)
+++ hive/branches/maven/metastore/scripts/upgrade/derby/014-HIVE-3764.derby.sql Fri Sep 27 17:41:42 2013
@@ -2,7 +2,7 @@
 -- Hive HIVE-3764
 -- Support metastore version consistency check
 
-CREATE TABLE "APP"."VERSION" ("VER_ID" BIGINT NOT NULL, "SCHEMA_VERSION" VARCHAR(127) NOT NULL, "COMMENT" VARCHAR(255));
+CREATE TABLE "APP"."VERSION" ("VER_ID" BIGINT NOT NULL, "SCHEMA_VERSION" VARCHAR(127) NOT NULL, "VERSION_COMMENT" VARCHAR(255));
 ALTER TABLE "APP"."VERSION" ADD CONSTRAINT "VERSION_PK" PRIMARY KEY ("VER_ID");
 
-INSERT INTO "APP"."VERSION" (VER_ID, SCHEMA_VERSION, COMMENT) VALUES (1, '', 'Initial value');
+INSERT INTO "APP"."VERSION" (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '', 'Initial value');

Modified: hive/branches/maven/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql
URL: http://svn.apache.org/viewvc/hive/branches/maven/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql (original)
+++ hive/branches/maven/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql Fri Sep 27 17:41:42 2013
@@ -2,8 +2,8 @@
 CREATE TABLE IF NOT EXISTS `VERSION` (
   `VER_ID` BIGINT NOT NULL,
   `SCHEMA_VERSION` VARCHAR(127) NOT NULL,
-  `COMMENT` VARCHAR(255),
+  `VERSION_COMMENT` VARCHAR(255),
   PRIMARY KEY (`VER_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
-INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, COMMENT) VALUES (1, '', 'Initial value');
+INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '', 'Initial value');

Modified: hive/branches/maven/metastore/scripts/upgrade/oracle/014-HIVE-3764.oracle.sql
URL: http://svn.apache.org/viewvc/hive/branches/maven/metastore/scripts/upgrade/oracle/014-HIVE-3764.oracle.sql?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/metastore/scripts/upgrade/oracle/014-HIVE-3764.oracle.sql (original)
+++ hive/branches/maven/metastore/scripts/upgrade/oracle/014-HIVE-3764.oracle.sql Fri Sep 27 17:41:42 2013
@@ -3,8 +3,8 @@
 CREATE TABLE IF NOT EXISTS VERSION (
   VER_ID NUMBER NOT NULL,
   SCHEMA_VERSION VARCHAR(127) NOT NULL,
-  COMMENT VARCHAR(255)
+  VERSION_COMMENT VARCHAR(255)
 )
 ALTER TABLE VERSION ADD CONSTRAINT VERSION_PK PRIMARY KEY (VER_ID);
 
-INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, COMMENT) VALUES (1, '', 'Initial value');
+INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '', 'Initial value');

Modified: hive/branches/maven/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql
URL: http://svn.apache.org/viewvc/hive/branches/maven/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql (original)
+++ hive/branches/maven/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql Fri Sep 27 17:41:42 2013
@@ -4,9 +4,9 @@
 CREATE TABLE "VERSION" (
   "VER_ID" bigint,
   "SCHEMA_VERSION" character varying(127) NOT NULL,
-  "COMMENT" character varying(255) NOT NULL,
+  "VERSION_COMMENT" character varying(255) NOT NULL,
   PRIMARY KEY ("VER_ID")
 );
 ALTER TABLE ONLY "VERSION" ADD CONSTRAINT "VERSION_pkey" PRIMARY KEY ("VER_ID");
 
-INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, COMMENT) VALUES (1, '', 'Initial value');
+INSERT INTO VERSION (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '', 'Initial value');
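
Across all four dialects the VERSION table's COMMENT column becomes VERSION_COMMENT. COMMENT is a keyword in several of these databases, and the original INSERT statements referenced it unquoted while the CREATE statements quoted it, so the rename removes both the keyword clash and the quoting inconsistency.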