You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by se...@apache.org on 2016/01/15 00:38:25 UTC

[1/2] hive git commit: HIVE-12794 : LLAP cannot run queries against HBase due to missing HBase jars (Sergey Shelukhin, reviewed by Gunther Hagleitner)

Repository: hive
Updated Branches:
  refs/heads/branch-2.0 295ad94f9 -> 2f4b68492
  refs/heads/master fe5cd560c -> 4ac36c4f0


HIVE-12794 : LLAP cannot run queries against HBase due to missing HBase jars (Sergey Shelukhin, reviewed by Gunther Hagleitner)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4ac36c4f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4ac36c4f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4ac36c4f

Branch: refs/heads/master
Commit: 4ac36c4f0b7841ecb43a78a8dba4ed6320a0c4d3
Parents: fe5cd56
Author: Sergey Shelukhin <se...@apache.org>
Authored: Thu Jan 14 15:34:28 2016 -0800
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Thu Jan 14 15:34:28 2016 -0800

----------------------------------------------------------------------
 llap-server/pom.xml                             | 36 +++++++++++
 .../hive/llap/cli/LlapOptionsProcessor.java     | 19 ++++--
 .../hadoop/hive/llap/cli/LlapServiceDriver.java | 68 +++++++++++++++-----
 llap-server/src/main/resources/package.py       | 54 +++++++---------
 4 files changed, 125 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/4ac36c4f/llap-server/pom.xml
----------------------------------------------------------------------
diff --git a/llap-server/pom.xml b/llap-server/pom.xml
index 2628782..916fb5c 100644
--- a/llap-server/pom.xml
+++ b/llap-server/pom.xml
@@ -210,6 +210,42 @@
       <version>${jersey.version}</version>
       <scope>test</scope>
     </dependency>
+<!-- HBase dependencies to call the API to localize the JARs -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+          <exclusions>
+             <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
   </dependencies>
 
   <profiles>

http://git-wip-us.apache.org/repos/asf/hive/blob/4ac36c4f/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
index 58ef472..6d25384 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
@@ -45,10 +45,12 @@ public class LlapOptionsProcessor {
     private final long size;
     private final long xmx;
     private final String jars;
+    private final boolean isHbase;
     private final Properties conf;
 
     public LlapOptions(String name, int instances, String directory, int executors, long cache,
-        long size, long xmx, String jars, @Nonnull Properties hiveconf) throws ParseException {
+        long size, long xmx, String jars, boolean isHbase, @Nonnull Properties hiveconf)
+            throws ParseException {
       if (instances <= 0) {
         throw new ParseException("Invalid configuration: " + instances
             + " (should be greater than 0)");
@@ -61,6 +63,7 @@ public class LlapOptionsProcessor {
       this.size = size;
       this.xmx = xmx;
       this.jars = jars;
+      this.isHbase = isHbase;
       this.conf = hiveconf;
     }
 
@@ -96,6 +99,10 @@ public class LlapOptionsProcessor {
       return jars;
     }
 
+    public boolean getIsHBase() {
+      return isHbase;
+    }
+
     public Properties getConfig() {
       return conf;
     }
@@ -141,8 +148,11 @@ public class LlapOptionsProcessor {
         .withDescription("working memory size").create('w'));
 
     options.addOption(OptionBuilder.hasArg().withArgName("auxjars").withLongOpt("auxjars")
-        .withDescription("additional jars to package (by default, JSON and HBase SerDe jars"
-            + " are packaged if available)").create('j'));
+        .withDescription("additional jars to package (by default, JSON SerDe jar is packaged"
+            + " if available)").create('j'));
+
+    options.addOption(OptionBuilder.hasArg().withArgName("auxhbase").withLongOpt("auxhbase")
+        .withDescription("whether to package the HBase jars (true by default)").create('h'));
 
     // -hiveconf x=y
     options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value")
@@ -174,6 +184,7 @@ public class LlapOptionsProcessor {
     final long cache = parseSuffixed(commandLine.getOptionValue("cache", "-1"));
     final long size = parseSuffixed(commandLine.getOptionValue("size", "-1"));
     final long xmx = parseSuffixed(commandLine.getOptionValue("xmx", "-1"));
+    final boolean isHbase = Boolean.parseBoolean(commandLine.getOptionValue("auxhbase", "true"));
 
     final Properties hiveconf;
 
@@ -186,7 +197,7 @@ public class LlapOptionsProcessor {
     // loglevel, chaosmonkey & args are parsed by the python processor
 
     return new LlapOptions(
-        name, instances, directory, executors, cache, size, xmx, jars, hiveconf);
+        name, instances, directory, executors, cache, size, xmx, jars, isHbase, hiveconf);
 
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4ac36c4f/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
index 0d54558..d6e1a6e 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
@@ -23,6 +23,7 @@ import java.io.OutputStreamWriter;
 import java.net.URL;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Collection;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hive.common.CompressionUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -38,6 +40,8 @@ import org.apache.hadoop.hive.llap.io.api.impl.LlapInputFormat;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.json.JSONObject;
 
@@ -47,7 +51,8 @@ public class LlapServiceDriver {
 
   protected static final Logger LOG = LoggerFactory.getLogger(LlapServiceDriver.class.getName());
   private static final String[] DEFAULT_AUX_CLASSES = new String[] {
-    "org.apache.hive.hcatalog.data.JsonSerDe", "org.apache.hadoop.hive.hbase.HBaseSerDe" };
+  "org.apache.hive.hcatalog.data.JsonSerDe" };
+  private static final String HBASE_SERDE_CLASS = "org.apache.hadoop.hive.hbase.HBaseSerDe";
   private static final String[] NEEDED_CONFIGS = {
     "tez-site.xml", "hive-site.xml", "llap-daemon-site.xml", "core-site.xml" };
   private static final String[] OPTIONAL_CONFIGS = { "ssl-server.xml" };
@@ -217,25 +222,25 @@ public class LlapServiceDriver {
     // copy default aux classes (json/hbase)
 
     for (String className : DEFAULT_AUX_CLASSES) {
-      String jarPath = null;
-      boolean hasException = false;
+      localizeJarForClass(lfs, libDir, className, false);
+    }
+
+    if (options.getIsHBase()) {
       try {
-        Class<?> auxClass = Class.forName(className);
-        jarPath = Utilities.jarFinderGetJar(auxClass);
+        localizeJarForClass(lfs, libDir, HBASE_SERDE_CLASS, true);
+        Job fakeJob = new Job(new JobConf()); // HBase API is convoluted.
+        TableMapReduceUtil.addDependencyJars(fakeJob);
+        Collection<String> hbaseJars = fakeJob.getConfiguration().getStringCollection("tmpjars");
+        for (String jarPath : hbaseJars) {
+          if (!jarPath.isEmpty()) {
+            lfs.copyFromLocalFile(new Path(jarPath), libDir);
+          }
+        }
       } catch (Throwable t) {
-        hasException = true;
-        String err =
-            "Cannot find a jar for [" + className + "] due to an exception (" + t.getMessage()
-                + "); not packaging the jar";
-        LOG.error(err, t);
-        System.err.println(err);
-      }
-      if (jarPath != null) {
-        lfs.copyFromLocalFile(new Path(jarPath), libDir);
-      } else if (!hasException) {
-        String err = "Cannot find a jar for [" + className + "]; not packaging the jar";
+        String err = "Failed to add HBase jars. Use --auxhbase=false to avoid localizing them";
         LOG.error(err);
         System.err.println(err);
+        throw new RuntimeException(t);
       }
     }
 
@@ -307,6 +312,37 @@ public class LlapServiceDriver {
     }
   }
 
+  // Finds the jar containing className and copies it to libDir; failures throw iff doThrow.
+  private void localizeJarForClass(FileSystem lfs, Path libDir, String className, boolean doThrow)
+      throws IOException {
+    String jarPath = null;
+    boolean hasException = false;
+    try {
+      Class<?> auxClass = Class.forName(className);
+      jarPath = Utilities.jarFinderGetJar(auxClass);
+    } catch (Throwable t) {
+      if (doThrow) {
+        throw (t instanceof IOException) ? (IOException)t : new IOException(t);
+      }
+      hasException = true;
+      String err =
+          "Cannot find a jar for [" + className + "] due to an exception (" + t.getMessage()
+              + "); not packaging the jar";
+      LOG.error(err, t);
+      System.err.println(err);
+    }
+    if (jarPath != null) {
+      lfs.copyFromLocalFile(new Path(jarPath), libDir);
+    } else if (!hasException) {
+      String err = "Cannot find a jar for [" + className + "]; not packaging the jar";
+      if (doThrow) {
+        throw new IOException(err);
+      }
+      LOG.error(err);
+      System.err.println(err);
+    }
+  }
+
   private void copyConfig(
       LlapOptions options, FileSystem lfs, Path confPath, String f) throws IOException {
     if (f.equals("llap-daemon-site.xml")) {

http://git-wip-us.apache.org/repos/asf/hive/blob/4ac36c4f/llap-server/src/main/resources/package.py
----------------------------------------------------------------------
diff --git a/llap-server/src/main/resources/package.py b/llap-server/src/main/resources/package.py
index d6e762e..5620483 100644
--- a/llap-server/src/main/resources/package.py
+++ b/llap-server/src/main/resources/package.py
@@ -1,7 +1,7 @@
 #!/usr/bin/python
 
 import sys,os,stat
-from getopt import getopt
+import argparse
 from json import loads as json_parse
 from os.path import exists, join, relpath
 from time import gmtime, strftime
@@ -17,8 +17,8 @@ class LlapResource(object):
 		self.cores = config["hive.llap.daemon.vcpus.per.instance"]
 		size = config["hive.llap.daemon.yarn.container.mb"]
 		# convert to Mb
-		self.cache = config["hive.llap.io.cache.orc.size"] / (1024*1024.0)
-		self.direct = config["hive.llap.io.cache.direct"]
+		self.cache = config["hive.llap.io.memory.size"] / (1024*1024.0)
+		self.direct = config["hive.llap.io.allocator.direct"]
 		self.min_mb = -1
 		self.min_cores = -1
 		# compute heap + cache as final Xmx
@@ -48,57 +48,47 @@ def zipdir(path, zip, prefix="."):
 			zip.write(src, dst)
 	
 def main(args):
-	opts, args = getopt(args,"",["instances=","output=", "input=","args=","name=","loglevel=","chaosmonkey=","size=","xmx=", "cache=", "executors=","hiveconf="])
 	version = os.getenv("HIVE_VERSION")
 	if not version:
 		version = strftime("%d%b%Y", gmtime()) 
 	home = os.getenv("HIVE_HOME")
 	output = "llap-slider-%(version)s" % ({"version": version})
-	instances=1
-	name = "llap0"
-	d_args = ""
-	d_loglevel = "INFO"
-	input = None
-	monkey = "0"
-	for k,v in opts:
-		if k in ("--input"):
-			input = v
-		elif k in ("--output"):
-			output = v
-		elif k in ("--instances"):
-			instances = int(v)
-		elif k in ("--name"):
-			name = v 
-		elif k in ("--args"):
-			d_args = v
-		elif k in ("--loglevel"):
-			d_loglevel = v
-		elif k in ("--chaosmonkey"):
-			monkey = v
+	parser = argparse.ArgumentParser()
+	parser.add_argument("--instances", type=int, default=1)
+	parser.add_argument("--output", default=output)
+	parser.add_argument("--input", required=True)
+	parser.add_argument("--args", default="")
+	parser.add_argument("--name", default="llap0")
+	parser.add_argument("--loglevel", default="INFO")
+	parser.add_argument("--chaosmonkey", type=int, default=0)
+	# Unneeded here for now: parser.add_argument("--hiveconf", action='append')
+	#parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors")
+	(args, unknown_args) = parser.parse_known_args(args)
+	input = args.input
+	output = args.output
 	if not input:
 		print "Cannot find input files"
 		sys.exit(1)
 		return
 	config = json_parse(open(join(input, "config.json")).read())
 	resource = LlapResource(config)
-	monkey_interval = int(monkey) 
 	# 5% container failure every monkey_interval seconds
 	monkey_percentage = 5 # 5%
 	vars = {
 		"home" : home,
 		"version" : version,
-		"instances" : instances,
+		"instances" : args.instances,
 		"heap" : resource.heap_size,
 		"container.mb" : resource.container_size,
 		"container.cores" : resource.container_cores,
 		"hadoop_home" : os.getenv("HADOOP_HOME"),
 		"java_home" : os.getenv("JAVA_HOME"),
-		"name" : name,
-		"daemon_args" : d_args,
-		"daemon_loglevel" : d_loglevel,
-		"monkey_interval" : monkey_interval,
+		"name" : args.name,
+		"daemon_args" : args.args,
+		"daemon_loglevel" : args.loglevel,
+		"monkey_interval" : args.chaosmonkey,
 		"monkey_percentage" : monkey_percentage,
-		"monkey_enabled" : monkey_interval > 0
+		"monkey_enabled" : args.chaosmonkey > 0
 	}
 	
 	if not exists(output):


[2/2] hive git commit: HIVE-12794 : LLAP cannot run queries against HBase due to missing HBase jars (Sergey Shelukhin, reviewed by Gunther Hagleitner)

Posted by se...@apache.org.
HIVE-12794 : LLAP cannot run queries against HBase due to missing HBase jars (Sergey Shelukhin, reviewed by Gunther Hagleitner)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2f4b6849
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2f4b6849
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2f4b6849

Branch: refs/heads/branch-2.0
Commit: 2f4b6849292b51841e747836ac86ceafd8306e64
Parents: 295ad94
Author: Sergey Shelukhin <se...@apache.org>
Authored: Thu Jan 14 15:34:28 2016 -0800
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Thu Jan 14 15:34:38 2016 -0800

----------------------------------------------------------------------
 llap-server/pom.xml                             | 36 +++++++++++
 .../hive/llap/cli/LlapOptionsProcessor.java     | 19 ++++--
 .../hadoop/hive/llap/cli/LlapServiceDriver.java | 68 +++++++++++++++-----
 llap-server/src/main/resources/package.py       | 54 +++++++---------
 4 files changed, 125 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/2f4b6849/llap-server/pom.xml
----------------------------------------------------------------------
diff --git a/llap-server/pom.xml b/llap-server/pom.xml
index ab4f5a4..ab25f66 100644
--- a/llap-server/pom.xml
+++ b/llap-server/pom.xml
@@ -210,6 +210,42 @@
       <version>${jersey.version}</version>
       <scope>test</scope>
     </dependency>
+<!-- HBase dependencies to call the API to localize the JARs -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+          <exclusions>
+             <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
   </dependencies>
 
   <profiles>

http://git-wip-us.apache.org/repos/asf/hive/blob/2f4b6849/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
index 58ef472..6d25384 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
@@ -45,10 +45,12 @@ public class LlapOptionsProcessor {
     private final long size;
     private final long xmx;
     private final String jars;
+    private final boolean isHbase;
     private final Properties conf;
 
     public LlapOptions(String name, int instances, String directory, int executors, long cache,
-        long size, long xmx, String jars, @Nonnull Properties hiveconf) throws ParseException {
+        long size, long xmx, String jars, boolean isHbase, @Nonnull Properties hiveconf)
+            throws ParseException {
       if (instances <= 0) {
         throw new ParseException("Invalid configuration: " + instances
             + " (should be greater than 0)");
@@ -61,6 +63,7 @@ public class LlapOptionsProcessor {
       this.size = size;
       this.xmx = xmx;
       this.jars = jars;
+      this.isHbase = isHbase;
       this.conf = hiveconf;
     }
 
@@ -96,6 +99,10 @@ public class LlapOptionsProcessor {
       return jars;
     }
 
+    public boolean getIsHBase() {
+      return isHbase;
+    }
+
     public Properties getConfig() {
       return conf;
     }
@@ -141,8 +148,11 @@ public class LlapOptionsProcessor {
         .withDescription("working memory size").create('w'));
 
     options.addOption(OptionBuilder.hasArg().withArgName("auxjars").withLongOpt("auxjars")
-        .withDescription("additional jars to package (by default, JSON and HBase SerDe jars"
-            + " are packaged if available)").create('j'));
+        .withDescription("additional jars to package (by default, JSON SerDe jar is packaged"
+            + " if available)").create('j'));
+
+    options.addOption(OptionBuilder.hasArg().withArgName("auxhbase").withLongOpt("auxhbase")
+        .withDescription("whether to package the HBase jars (true by default)").create('h'));
 
     // -hiveconf x=y
     options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value")
@@ -174,6 +184,7 @@ public class LlapOptionsProcessor {
     final long cache = parseSuffixed(commandLine.getOptionValue("cache", "-1"));
     final long size = parseSuffixed(commandLine.getOptionValue("size", "-1"));
     final long xmx = parseSuffixed(commandLine.getOptionValue("xmx", "-1"));
+    final boolean isHbase = Boolean.parseBoolean(commandLine.getOptionValue("auxhbase", "true"));
 
     final Properties hiveconf;
 
@@ -186,7 +197,7 @@ public class LlapOptionsProcessor {
     // loglevel, chaosmonkey & args are parsed by the python processor
 
     return new LlapOptions(
-        name, instances, directory, executors, cache, size, xmx, jars, hiveconf);
+        name, instances, directory, executors, cache, size, xmx, jars, isHbase, hiveconf);
 
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/2f4b6849/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
index 0d54558..d6e1a6e 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
@@ -23,6 +23,7 @@ import java.io.OutputStreamWriter;
 import java.net.URL;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Collection;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hive.common.CompressionUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -38,6 +40,8 @@ import org.apache.hadoop.hive.llap.io.api.impl.LlapInputFormat;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.json.JSONObject;
 
@@ -47,7 +51,8 @@ public class LlapServiceDriver {
 
   protected static final Logger LOG = LoggerFactory.getLogger(LlapServiceDriver.class.getName());
   private static final String[] DEFAULT_AUX_CLASSES = new String[] {
-    "org.apache.hive.hcatalog.data.JsonSerDe", "org.apache.hadoop.hive.hbase.HBaseSerDe" };
+  "org.apache.hive.hcatalog.data.JsonSerDe" };
+  private static final String HBASE_SERDE_CLASS = "org.apache.hadoop.hive.hbase.HBaseSerDe";
   private static final String[] NEEDED_CONFIGS = {
     "tez-site.xml", "hive-site.xml", "llap-daemon-site.xml", "core-site.xml" };
   private static final String[] OPTIONAL_CONFIGS = { "ssl-server.xml" };
@@ -217,25 +222,25 @@ public class LlapServiceDriver {
     // copy default aux classes (json/hbase)
 
     for (String className : DEFAULT_AUX_CLASSES) {
-      String jarPath = null;
-      boolean hasException = false;
+      localizeJarForClass(lfs, libDir, className, false);
+    }
+
+    if (options.getIsHBase()) {
       try {
-        Class<?> auxClass = Class.forName(className);
-        jarPath = Utilities.jarFinderGetJar(auxClass);
+        localizeJarForClass(lfs, libDir, HBASE_SERDE_CLASS, true);
+        Job fakeJob = new Job(new JobConf()); // HBase API is convoluted.
+        TableMapReduceUtil.addDependencyJars(fakeJob);
+        Collection<String> hbaseJars = fakeJob.getConfiguration().getStringCollection("tmpjars");
+        for (String jarPath : hbaseJars) {
+          if (!jarPath.isEmpty()) {
+            lfs.copyFromLocalFile(new Path(jarPath), libDir);
+          }
+        }
       } catch (Throwable t) {
-        hasException = true;
-        String err =
-            "Cannot find a jar for [" + className + "] due to an exception (" + t.getMessage()
-                + "); not packaging the jar";
-        LOG.error(err, t);
-        System.err.println(err);
-      }
-      if (jarPath != null) {
-        lfs.copyFromLocalFile(new Path(jarPath), libDir);
-      } else if (!hasException) {
-        String err = "Cannot find a jar for [" + className + "]; not packaging the jar";
+        String err = "Failed to add HBase jars. Use --auxhbase=false to avoid localizing them";
         LOG.error(err);
         System.err.println(err);
+        throw new RuntimeException(t);
       }
     }
 
@@ -307,6 +312,37 @@ public class LlapServiceDriver {
     }
   }
 
+  // Finds the jar containing className and copies it to libDir; failures throw iff doThrow.
+  private void localizeJarForClass(FileSystem lfs, Path libDir, String className, boolean doThrow)
+      throws IOException {
+    String jarPath = null;
+    boolean hasException = false;
+    try {
+      Class<?> auxClass = Class.forName(className);
+      jarPath = Utilities.jarFinderGetJar(auxClass);
+    } catch (Throwable t) {
+      if (doThrow) {
+        throw (t instanceof IOException) ? (IOException)t : new IOException(t);
+      }
+      hasException = true;
+      String err =
+          "Cannot find a jar for [" + className + "] due to an exception (" + t.getMessage()
+              + "); not packaging the jar";
+      LOG.error(err, t);
+      System.err.println(err);
+    }
+    if (jarPath != null) {
+      lfs.copyFromLocalFile(new Path(jarPath), libDir);
+    } else if (!hasException) {
+      String err = "Cannot find a jar for [" + className + "]; not packaging the jar";
+      if (doThrow) {
+        throw new IOException(err);
+      }
+      LOG.error(err);
+      System.err.println(err);
+    }
+  }
+
   private void copyConfig(
       LlapOptions options, FileSystem lfs, Path confPath, String f) throws IOException {
     if (f.equals("llap-daemon-site.xml")) {

http://git-wip-us.apache.org/repos/asf/hive/blob/2f4b6849/llap-server/src/main/resources/package.py
----------------------------------------------------------------------
diff --git a/llap-server/src/main/resources/package.py b/llap-server/src/main/resources/package.py
index d6e762e..5620483 100644
--- a/llap-server/src/main/resources/package.py
+++ b/llap-server/src/main/resources/package.py
@@ -1,7 +1,7 @@
 #!/usr/bin/python
 
 import sys,os,stat
-from getopt import getopt
+import argparse
 from json import loads as json_parse
 from os.path import exists, join, relpath
 from time import gmtime, strftime
@@ -17,8 +17,8 @@ class LlapResource(object):
 		self.cores = config["hive.llap.daemon.vcpus.per.instance"]
 		size = config["hive.llap.daemon.yarn.container.mb"]
 		# convert to Mb
-		self.cache = config["hive.llap.io.cache.orc.size"] / (1024*1024.0)
-		self.direct = config["hive.llap.io.cache.direct"]
+		self.cache = config["hive.llap.io.memory.size"] / (1024*1024.0)
+		self.direct = config["hive.llap.io.allocator.direct"]
 		self.min_mb = -1
 		self.min_cores = -1
 		# compute heap + cache as final Xmx
@@ -48,57 +48,47 @@ def zipdir(path, zip, prefix="."):
 			zip.write(src, dst)
 	
 def main(args):
-	opts, args = getopt(args,"",["instances=","output=", "input=","args=","name=","loglevel=","chaosmonkey=","size=","xmx=", "cache=", "executors=","hiveconf="])
 	version = os.getenv("HIVE_VERSION")
 	if not version:
 		version = strftime("%d%b%Y", gmtime()) 
 	home = os.getenv("HIVE_HOME")
 	output = "llap-slider-%(version)s" % ({"version": version})
-	instances=1
-	name = "llap0"
-	d_args = ""
-	d_loglevel = "INFO"
-	input = None
-	monkey = "0"
-	for k,v in opts:
-		if k in ("--input"):
-			input = v
-		elif k in ("--output"):
-			output = v
-		elif k in ("--instances"):
-			instances = int(v)
-		elif k in ("--name"):
-			name = v 
-		elif k in ("--args"):
-			d_args = v
-		elif k in ("--loglevel"):
-			d_loglevel = v
-		elif k in ("--chaosmonkey"):
-			monkey = v
+	parser = argparse.ArgumentParser()
+	parser.add_argument("--instances", type=int, default=1)
+	parser.add_argument("--output", default=output)
+	parser.add_argument("--input", required=True)
+	parser.add_argument("--args", default="")
+	parser.add_argument("--name", default="llap0")
+	parser.add_argument("--loglevel", default="INFO")
+	parser.add_argument("--chaosmonkey", type=int, default=0)
+	# Unneeded here for now: parser.add_argument("--hiveconf", action='append')
+	#parser.add_argument("--size") parser.add_argument("--xmx") parser.add_argument("--cache") parser.add_argument("--executors")
+	(args, unknown_args) = parser.parse_known_args(args)
+	input = args.input
+	output = args.output
 	if not input:
 		print "Cannot find input files"
 		sys.exit(1)
 		return
 	config = json_parse(open(join(input, "config.json")).read())
 	resource = LlapResource(config)
-	monkey_interval = int(monkey) 
 	# 5% container failure every monkey_interval seconds
 	monkey_percentage = 5 # 5%
 	vars = {
 		"home" : home,
 		"version" : version,
-		"instances" : instances,
+		"instances" : args.instances,
 		"heap" : resource.heap_size,
 		"container.mb" : resource.container_size,
 		"container.cores" : resource.container_cores,
 		"hadoop_home" : os.getenv("HADOOP_HOME"),
 		"java_home" : os.getenv("JAVA_HOME"),
-		"name" : name,
-		"daemon_args" : d_args,
-		"daemon_loglevel" : d_loglevel,
-		"monkey_interval" : monkey_interval,
+		"name" : args.name,
+		"daemon_args" : args.args,
+		"daemon_loglevel" : args.loglevel,
+		"monkey_interval" : args.chaosmonkey,
 		"monkey_percentage" : monkey_percentage,
-		"monkey_enabled" : monkey_interval > 0
+		"monkey_enabled" : args.chaosmonkey > 0
 	}
 	
 	if not exists(output):