You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by vg...@apache.org on 2017/07/14 17:40:43 UTC
hive git commit: HIVE-4577: hive CLI can't handle hadoop dfs command
with space and quotes (Bing Li reviewed by Vaibhav Gumashta)
Repository: hive
Updated Branches:
refs/heads/master 4af462495 -> adca35a46
HIVE-4577: hive CLI can't handle hadoop dfs command with space and quotes (Bing Li reviewed by Vaibhav Gumashta)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/adca35a4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/adca35a4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/adca35a4
Branch: refs/heads/master
Commit: adca35a469e31f496a5001e88f265a9145bfbcdf
Parents: 4af4624
Author: Vaibhav Gumashta <vg...@hortonworks.com>
Authored: Fri Jul 14 10:40:20 2017 -0700
Committer: Vaibhav Gumashta <vg...@hortonworks.com>
Committed: Fri Jul 14 10:40:20 2017 -0700
----------------------------------------------------------------------
.../hadoop/hive/ql/processors/DfsProcessor.java | 60 +++++++++++++++++++-
ql/src/test/queries/clientpositive/dfscmd.q | 7 +++
ql/src/test/results/clientpositive/dfscmd.q.out | 1 +
.../results/clientpositive/perf/query14.q.out | 2 +-
4 files changed, 68 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/adca35a4/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
index 19f5bde..87a0c5a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.processors;
import java.io.PrintStream;
import java.util.Arrays;
+import java.util.ArrayList;
import java.util.Map;
import org.slf4j.Logger;
@@ -30,6 +31,7 @@ import org.apache.hadoop.hive.conf.HiveVariableSource;
import org.apache.hadoop.hive.conf.VariableSubstitution;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -74,7 +76,7 @@ public class DfsProcessor implements CommandProcessor {
}
}).substitute(ss.getConf(), command);
- String[] tokens = command.split("\\s+");
+ String[] tokens = splitCmd(command);
CommandProcessorResponse authErrResp =
CommandUtil.authorizeCommand(ss, HiveOperationType.DFS, Arrays.asList(tokens));
if(authErrResp != null){
@@ -104,4 +106,60 @@ public class DfsProcessor implements CommandProcessor {
}
}
+ /**
+ * Splits a dfs command line into tokens, honoring single- and double-quoted
+ * segments so that quoted arguments containing spaces (e.g. "bei jing")
+ * survive as one token. Replaces the previous plain command.split("\\s+").
+ *
+ * @param command the dfs command string after variable substitution
+ * @return the command tokens, quotes removed
+ * @throws CommandNeedRetryException if a quote is left unterminated
+ */
+ private String[] splitCmd(String command) throws CommandNeedRetryException {
+
+ ArrayList<String> paras = new ArrayList<String>();
+ int cmdLng = command.length();
+ // y holds the currently open quote character ('"' or '\''), or 0 when
+ // not inside a quoted segment.
+ char y = 0;
+ // start marks the beginning index of the token currently being scanned.
+ int start = 0;
+
+ for (int i = 0; i < cmdLng; i++) {
+ char x = command.charAt(i);
+
+ switch(x) {
+ case ' ':
+ // A space outside quotes terminates the current token; spaces
+ // inside quotes fall through untouched because y != 0.
+ if ((int) y == 0) {
+ String str = command.substring(start, i).trim();
+ if (!str.equals("")) {
+ paras.add(str);
+ start = i + 1;
+ }
+ }
+ break;
+ case '"':
+ // Opening double quote: remember it in y and start the token just
+ // after the quote. NOTE(review): any characters between the last
+ // token boundary and this quote (e.g. a"b c") are silently
+ // dropped, not joined to the quoted text — confirm this matches
+ // the intended shell-like behavior.
+ if ((int) y == 0) {
+ y = x;
+ start = i + 1;
+ } else if ('"' == y) {
+ // Closing double quote: emit the quoted token. NOTE(review):
+ // trim() here also strips whitespace immediately inside the
+ // quotes (" hello " becomes "hello") — verify intentional.
+ paras.add(command.substring(start, i).trim());
+ y = 0;
+ start = i + 1;
+ }
+ break;
+ case '\'':
+ // Single quotes mirror the double-quote handling above; a single
+ // quote inside a double-quoted segment (y == '"') is kept literal.
+ if ((int) y == 0) {
+ y = x;
+ start = i + 1;
+ } else if ('\'' == y) {
+ paras.add(command.substring(start, i).trim());
+ y = 0;
+ start = i + 1;
+ }
+ break;
+ default:
+ // Ordinary character: only the final character needs action, to
+ // flush the trailing unquoted token once the scan ends.
+ if (i == cmdLng-1 && start < cmdLng) {
+ paras.add(command.substring(start, cmdLng).trim());
+ }
+ break;
+ }
+ }
+
+ // A non-zero y after the scan means a quote was opened but never closed.
+ if ((int) y != 0) {
+ console.printError("Syntax error on hadoop options: dfs " + command);
+ throw new CommandNeedRetryException();
+ }
+
+ return paras.toArray(new String[paras.size()]);
+ }
+
}
http://git-wip-us.apache.org/repos/asf/hive/blob/adca35a4/ql/src/test/queries/clientpositive/dfscmd.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/dfscmd.q b/ql/src/test/queries/clientpositive/dfscmd.q
new file mode 100644
index 0000000..0789336
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/dfscmd.q
@@ -0,0 +1,7 @@
+dfs -mkdir "hello";
+dfs -mkdir 'world';
+dfs -mkdir "bei jing";
+dfs -rmr 'hello';
+dfs -rmr "world";
+dfs -rmr 'bei jing';
+
http://git-wip-us.apache.org/repos/asf/hive/blob/adca35a4/ql/src/test/results/clientpositive/dfscmd.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/dfscmd.q.out b/ql/src/test/results/clientpositive/dfscmd.q.out
new file mode 100644
index 0000000..0b8182a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/dfscmd.q.out
@@ -0,0 +1 @@
+#### A masked pattern was here ####
http://git-wip-us.apache.org/repos/asf/hive/blob/adca35a4/ql/src/test/results/clientpositive/perf/query14.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query14.q.out b/ql/src/test/results/clientpositive/perf/query14.q.out
index 37156ec..b15587c 100644
--- a/ql/src/test/results/clientpositive/perf/query14.q.out
+++ b/ql/src/test/results/clientpositive/perf/query14.q.out
@@ -1,6 +1,6 @@
-Warning: Shuffle Join MERGEJOIN[890][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 5' is a cross product
Warning: Shuffle Join MERGEJOIN[891][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 12' is a cross product
Warning: Shuffle Join MERGEJOIN[892][tables = [$hdt$_2, $hdt$_3, $hdt$_1]] in Stage 'Reducer 16' is a cross product
+Warning: Shuffle Join MERGEJOIN[890][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 5' is a cross product
PREHOOK: query: explain
with cross_items as
(select i_item_sk ss_item_sk