Posted to commits@hive.apache.org by jd...@apache.org on 2016/04/20 21:47:25 UTC
hive git commit: HIVE-13558: Update LlapDump
Repository: hive
Updated Branches:
refs/heads/llap 99cb7f96f -> 5816ff303
HIVE-13558: Update LlapDump
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5816ff30
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5816ff30
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5816ff30
Branch: refs/heads/llap
Commit: 5816ff3038ca6b263d97e5b7e8f10f97f2b657ec
Parents: 99cb7f9
Author: Jason Dere <jd...@hortonworks.com>
Authored: Wed Apr 20 12:46:53 2016 -0700
Committer: Jason Dere <jd...@hortonworks.com>
Committed: Wed Apr 20 12:46:53 2016 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hive/llap/LlapDump.java | 60 +++++++++++++++++---
1 file changed, 51 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
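For context, the new --hiveconf option takes property=value pairs that
LlapDump copies into the JobConf before requesting splits. A hypothetical
invocation (the classpath and the chosen property are placeholders; the -n
and --hiveconf options and the positional query argument come from the
code below) might look like:

  java -cp <hive-classpath> org.apache.hadoop.hive.llap.LlapDump \
      --hiveconf hive.fetch.task.conversion=none \
      -n 1 \
      "select * from test_table"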
http://git-wip-us.apache.org/repos/asf/hive/blob/5816ff30/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapDump.java
----------------------------------------------------------------------
diff --git a/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapDump.java b/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapDump.java
index ce419af..1c4397f 100644
--- a/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapDump.java
+++ b/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapDump.java
@@ -24,8 +24,11 @@ import java.io.IOException;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
+import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -49,10 +52,11 @@ import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.hive.llap.io.api.LlapProxy;
-import org.apache.hadoop.hive.llap.LlapBaseRecordReader;
-import org.apache.hadoop.hive.llap.Schema;
-
import org.apache.hadoop.hive.llap.LlapBaseInputFormat;
+import org.apache.hadoop.hive.llap.LlapRowInputFormat;
+import org.apache.hadoop.hive.llap.LlapRowRecordReader;
+import org.apache.hadoop.hive.llap.Row;
+import org.apache.hadoop.hive.llap.Schema;
public class LlapDump {
@@ -90,6 +94,8 @@ public class LlapDump {
numSplits = cli.getOptionValue("n");
}
+ Properties configProps = cli.getOptionProperties("hiveconf");
+
if (cli.getArgs().length > 0) {
query = cli.getArgs()[0];
}
@@ -98,8 +104,18 @@ public class LlapDump {
System.out.println("user: "+user);
System.out.println("query: "+query);
- LlapBaseInputFormat format = new LlapBaseInputFormat(url, user, pwd, query);
+ LlapRowInputFormat format = new LlapRowInputFormat();
+
JobConf job = new JobConf();
+ job.set(LlapBaseInputFormat.URL_KEY, url);
+ job.set(LlapBaseInputFormat.USER_KEY, user);
+ job.set(LlapBaseInputFormat.PWD_KEY, pwd);
+ job.set(LlapBaseInputFormat.QUERY_KEY, query);
+
+ // Additional conf settings specified on the command line
+ for (String key: configProps.stringPropertyNames()) {
+ job.set(key, configProps.getProperty(key));
+ }
InputSplit[] splits = format.getSplits(job, Integer.parseInt(numSplits));
@@ -111,10 +127,10 @@ public class LlapDump {
for (InputSplit s: splits) {
LOG.info("Processing input split s from " + Arrays.toString(s.getLocations()));
- RecordReader<NullWritable, Text> reader = format.getRecordReader(s, job, null);
+ RecordReader<NullWritable, Row> reader = format.getRecordReader(s, job, null);
- if (reader instanceof LlapBaseRecordReader && first) {
- Schema schema = ((LlapBaseRecordReader)reader).getSchema();
+ if (reader instanceof LlapRowRecordReader && first) {
+ Schema schema = ((LlapRowRecordReader)reader).getSchema();
System.out.println(""+schema);
}
@@ -124,15 +140,27 @@ public class LlapDump {
first = false;
}
- Text value = reader.createValue();
+ Row value = reader.createValue();
while (reader.next(NullWritable.get(), value)) {
- System.out.println(value);
+ printRow(value);
}
}
System.exit(0);
}
}
+ private static void printRow(Row row) {
+ Schema schema = row.getSchema();
+ StringBuilder sb = new StringBuilder();
+ for (int idx = 0; idx < schema.getColumns().size(); ++idx) {
+ if (idx > 0) {
+ sb.append(", ");
+ }
+ sb.append(row.getValue(idx));
+ }
+ System.out.println(sb.toString());
+ }
+
static Options createOptions() {
Options result = new Options();
@@ -160,6 +188,20 @@ public class LlapDump {
.hasArg()
.create('n'));
+ result.addOption(OptionBuilder
+ .withValueSeparator()
+ .hasArgs(2)
+ .withArgName("property=value")
+ .withLongOpt("hiveconf")
+ .withDescription("Use value for given property")
+ .create());
+
+ result.addOption(OptionBuilder
+ .withLongOpt("help")
+ .withDescription("help")
+ .hasArg(false)
+ .create('h'));
+
return result;
}
}
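----------------------------------------------------------------------
For reference, a minimal sketch (not part of this commit) of how an
external client might read rows through the JobConf-based configuration
this patch introduces. The connection URL, user, password, and query
strings below are hypothetical placeholders; the LlapBaseInputFormat
key constants and the LlapRowInputFormat/Row/Schema usage mirror the
diff above.

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.hive.llap.LlapBaseInputFormat;
import org.apache.hadoop.hive.llap.LlapRowInputFormat;
import org.apache.hadoop.hive.llap.Row;

public class LlapRowClientSketch {
  public static void main(String[] args) throws Exception {
    LlapRowInputFormat format = new LlapRowInputFormat();

    // Connection and query settings travel through the JobConf,
    // mirroring LlapDump after this patch. All values here are
    // placeholders for illustration.
    JobConf job = new JobConf();
    job.set(LlapBaseInputFormat.URL_KEY, "jdbc:hive2://localhost:10000/default");
    job.set(LlapBaseInputFormat.USER_KEY, "hive");
    job.set(LlapBaseInputFormat.PWD_KEY, "");
    job.set(LlapBaseInputFormat.QUERY_KEY, "select * from test_table");

    // Fetch splits, then read rows split by split, as LlapDump does.
    InputSplit[] splits = format.getSplits(job, 1);
    for (InputSplit split : splits) {
      RecordReader<NullWritable, Row> reader = format.getRecordReader(split, job, null);
      Row row = reader.createValue();
      while (reader.next(NullWritable.get(), row)) {
        System.out.println(row.getValue(0)); // first column only, for brevity
      }
      reader.close();
    }
  }
}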