Posted to common-commits@hadoop.apache.org by ma...@apache.org on 2017/08/02 18:25:22 UTC
hadoop git commit: HADOOP-13595. Rework hadoop_usage to be broken up by clients/daemons/etc. Contributed by Allen Wittenauer.
Repository: hadoop
Updated Branches:
refs/heads/trunk 8ce8672b6 -> 1a1bf6b7d
HADOOP-13595. Rework hadoop_usage to be broken up by clients/daemons/etc. Contributed by Allen Wittenauer.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1a1bf6b7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1a1bf6b7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1a1bf6b7
Branch: refs/heads/trunk
Commit: 1a1bf6b7d044929bc9d6a4763780d916b00ccf5a
Parents: 8ce8672
Author: Sean Mackrory <ma...@apache.org>
Authored: Wed Aug 2 11:17:40 2017 -0600
Committer: Sean Mackrory <ma...@apache.org>
Committed: Wed Aug 2 12:25:05 2017 -0600
----------------------------------------------------------------------
.../hadoop-common/src/main/bin/hadoop | 28 ++--
.../src/main/bin/hadoop-functions.sh | 155 +++++++++++++++++--
.../src/site/markdown/UnixShellGuide.md | 4 +-
.../test/scripts/hadoop_add_array_param.bats | 37 +++++
.../src/test/scripts/hadoop_array_contains.bats | 47 ++++++
.../src/test/scripts/hadoop_sort_array.bats | 37 +++++
.../main/libexec/shellprofile.d/hadoop-kms.sh | 4 +-
.../libexec/shellprofile.d/hadoop-httpfs.sh | 2 +-
.../hadoop-hdfs/src/main/bin/hdfs | 62 ++++----
hadoop-mapreduce-project/bin/mapred | 18 +--
.../main/shellprofile.d/hadoop-archive-logs.sh | 2 +-
.../src/main/shellprofile.d/hadoop-archives.sh | 4 +-
.../src/main/shellprofile.d/hadoop-distcp.sh | 4 +-
.../src/main/shellprofile.d/hadoop-extras.sh | 2 +-
.../src/main/shellprofile.d/hadoop-gridmix.sh | 2 +-
.../src/main/shellprofile.d/hadoop-rumen.sh | 4 +-
.../src/main/shellprofile.d/hadoop-streaming.sh | 2 +-
hadoop-yarn-project/hadoop-yarn/bin/yarn | 44 +++---
18 files changed, 352 insertions(+), 106 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-common-project/hadoop-common/src/main/bin/hadoop
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 70f66a5..3834600 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -30,20 +30,20 @@ function hadoop_usage
hadoop_add_option "hosts filename" "list of hosts to use in slave mode"
hadoop_add_option "workers" "turn on worker mode"
- hadoop_add_subcommand "checknative" "check native Hadoop and compression libraries availability"
- hadoop_add_subcommand "classpath" "prints the class path needed to get the Hadoop jar and the required libraries"
- hadoop_add_subcommand "conftest" "validate configuration XML files"
- hadoop_add_subcommand "credential" "interact with credential providers"
- hadoop_add_subcommand "daemonlog" "get/set the log level for each daemon"
- hadoop_add_subcommand "dtutil" "operations related to delegation tokens"
- hadoop_add_subcommand "envvars" "display computed Hadoop environment variables"
- hadoop_add_subcommand "fs" "run a generic filesystem user client"
- hadoop_add_subcommand "jar <jar>" "run a jar file. NOTE: please use \"yarn jar\" to launch YARN applications, not this command."
- hadoop_add_subcommand "jnipath" "prints the java.library.path"
- hadoop_add_subcommand "kerbname" "show auth_to_local principal conversion"
- hadoop_add_subcommand "key" "manage keys via the KeyProvider"
- hadoop_add_subcommand "trace" "view and modify Hadoop tracing settings"
- hadoop_add_subcommand "version" "print the version"
+ hadoop_add_subcommand "checknative" client "check native Hadoop and compression libraries availability"
+ hadoop_add_subcommand "classpath" client "prints the class path needed to get the Hadoop jar and the required libraries"
+ hadoop_add_subcommand "conftest" client "validate configuration XML files"
+ hadoop_add_subcommand "credential" client "interact with credential providers"
+ hadoop_add_subcommand "daemonlog" admin "get/set the log level for each daemon"
+ hadoop_add_subcommand "dtutil" client "operations related to delegation tokens"
+ hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
+ hadoop_add_subcommand "fs" client "run a generic filesystem user client"
+ hadoop_add_subcommand "jar <jar>" client "run a jar file. NOTE: please use \"yarn jar\" to launch YARN applications, not this command."
+ hadoop_add_subcommand "jnipath" client "prints the java.library.path"
+ hadoop_add_subcommand "kerbname" client "show auth_to_local principal conversion"
+ hadoop_add_subcommand "key" client "manage keys via the KeyProvider"
+ hadoop_add_subcommand "trace" client "view and modify Hadoop tracing settings"
+ hadoop_add_subcommand "version" client "print the version"
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index 2744643..3cf21cf 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -18,6 +18,7 @@
# be done outside of a function
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE
+declare -a HADOOP_SUBCMD_USAGE_TYPES
## @description Print a message to stderr
## @audience public
@@ -115,6 +116,89 @@ function hadoop_verify_entry
[[ ${!1} =~ \ ${2}\ ]]
}
+## @description Check if an array has a given value
+## @audience public
+## @stability stable
+## @replaceable yes
+## @param element
+## @param array
+## @returns 0 = yes
+## @returns 1 = no
+function hadoop_array_contains
+{
+ declare element=$1
+ shift
+ declare val
+
+ if [[ "$#" -eq 0 ]]; then
+ return 1
+ fi
+
+ for val in "${@}"; do
+ if [[ "${val}" == "${element}" ]]; then
+ return 0
+ fi
+ done
+ return 1
+}
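As an illustrative aside (not part of the commit): once hadoop-functions.sh has been sourced, the new predicate can be used like this. The LIST array and its values here are hypothetical.

```bash
# hypothetical array; hadoop_array_contains takes the element first,
# then the expanded array values
LIST=("fs" "jar" "version")
if hadoop_array_contains "jar" "${LIST[@]}"; then
  echo "jar is present"                        # exit status 0 = found
fi
hadoop_array_contains "distcp" "${LIST[@]}" || echo "distcp is absent"
```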
+
+## @description Add the `appendstring` if it is not
+## @description already present in the given array
+## @audience public
+## @stability stable
+## @replaceable yes
+## @param arrayvar
+## @param appendstring
+function hadoop_add_array_param
+{
+ declare arrname=$1
+ declare add=$2
+
+ declare arrref="${arrname}[@]"
+ declare array=("${!arrref}")
+
+ if ! hadoop_array_contains "${add}" "${array[@]}"; then
+ #shellcheck disable=SC1083,SC2086
+ eval ${arrname}=\(\"\${array[@]}\" \"${add}\" \)
+ hadoop_debug "$1 accepted $2"
+ else
+ hadoop_debug "$1 declined $2"
+ fi
+}
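A minimal sketch of the dedup behavior, assuming hadoop-functions.sh is sourced. Note that the function takes the array name (it uses indirection and eval), not the expanded values; TYPES here is hypothetical.

```bash
TYPES=("client")
hadoop_add_array_param TYPES daemon   # appended: "daemon" not yet present
hadoop_add_array_param TYPES client   # declined: already in the array
echo "${TYPES[@]}"                    # -> client daemon
```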
+
+## @description Sort an array in place
+## @description (values must not contain regexps)
+## @audience public
+## @stability stable
+## @replaceable yes
+## @param arrayvar
+function hadoop_sort_array
+{
+ declare arrname=$1
+ declare arrref="${arrname}[@]"
+ declare array=("${!arrref}")
+ declare oifs
+
+ declare globstatus
+ declare -a sa
+
+ globstatus=$(set -o | grep noglob | awk '{print $NF}')
+
+ set -f
+ oifs=${IFS}
+
+ # shellcheck disable=SC2034
+ IFS=$'\n' sa=($(sort <<<"${array[*]}"))
+
+ # shellcheck disable=SC1083
+ eval "${arrname}"=\(\"\${sa[@]}\"\)
+
+ IFS=${oifs}
+ if [[ "${globstatus}" = off ]]; then
+ set +f
+ fi
+}
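Again passing the array name rather than its expansion, a sketch (NAMES is hypothetical):

```bash
NAMES=("yarn" "hdfs" "mapred")
hadoop_sort_array NAMES               # sorts the named array in place
echo "${NAMES[@]}"                    # -> hdfs mapred yarn
```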
+
## @description Check if we are running with priv
## @description by default, this implementation looks for
## @description EUID=0. For OSes that have true priv
@@ -220,13 +304,20 @@ function hadoop_uservar_su
## @stability evolving
## @replaceable no
## @param subcommand
+## @param subcommandtype
## @param subcommanddesc
function hadoop_add_subcommand
{
- local subcmd=$1
- local text=$2
+ declare subcmd=$1
+ declare subtype=$2
+ declare text=$3
+
+ hadoop_debug "${subcmd} as a ${subtype}"
- HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${text}"
+ hadoop_add_array_param HADOOP_SUBCMD_USAGE_TYPES "${subtype}"
+
+ # done in this order so that sort works later
+ HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${subtype}@${text}"
((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}
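In other words, registration now carries a type between the subcommand name and its description, and the entry is stored with `@` as the field separator so the usage printer can split and group it later. A sketch using a value from this patch:

```bash
hadoop_add_subcommand "fs" client "run a generic filesystem user client"
# records "fs@client@run a generic filesystem user client" in
# HADOOP_SUBCMD_USAGE and adds "client" to HADOOP_SUBCMD_USAGE_TYPES
```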
@@ -253,17 +344,22 @@ function hadoop_reset_usage
{
HADOOP_SUBCMD_USAGE=()
HADOOP_OPTION_USAGE=()
+ HADOOP_SUBCMD_USAGE_TYPES=()
HADOOP_SUBCMD_USAGE_COUNTER=0
HADOOP_OPTION_USAGE_COUNTER=0
}
## @description Print a screen-size aware two-column output
+## @description if reqtype is not null, only print those requested
## @audience private
## @stability evolving
## @replaceable no
+## @param reqtype
## @param array
function hadoop_generic_columnprinter
{
+ declare reqtype=$1
+ shift
declare -a input=("$@")
declare -i i=0
declare -i counter=0
@@ -275,11 +371,13 @@ function hadoop_generic_columnprinter
declare -i foldsize
declare -a tmpa
declare numcols
+ declare brup
if [[ -n "${COLUMNS}" ]]; then
numcols=${COLUMNS}
else
numcols=$(tput cols) 2>/dev/null
+ COLUMNS=${numcols}
fi
if [[ -z "${numcols}"
@@ -292,7 +390,8 @@ function hadoop_generic_columnprinter
while read -r line; do
tmpa[${counter}]=${line}
((counter=counter+1))
- option=$(echo "${line}" | cut -f1 -d'@')
+ IFS='@' read -ra brup <<< "${line}"
+ option="${brup[0]}"
if [[ ${#option} -gt ${maxoptsize} ]]; then
maxoptsize=${#option}
fi
@@ -304,8 +403,22 @@ function hadoop_generic_columnprinter
((foldsize=numcols-maxoptsize))
until [[ $i -eq ${#tmpa[@]} ]]; do
- option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
- giventext=$(echo "${tmpa[$i]}" | cut -f2 -d'@')
+ IFS='@' read -ra brup <<< "${tmpa[$i]}"
+
+ option="${brup[0]}"
+ cmdtype="${brup[1]}"
+ giventext="${brup[2]}"
+
+ if [[ -n "${reqtype}" ]]; then
+ if [[ "${cmdtype}" != "${reqtype}" ]]; then
+ ((i=i+1))
+ continue
+ fi
+ fi
+
+ if [[ -z "${giventext}" ]]; then
+ giventext=${cmdtype}
+ fi
while read -r line; do
printf "%-${maxoptsize}s %-s\n" "${option}" "${line}"
@@ -325,13 +438,14 @@ function hadoop_generic_columnprinter
## @param [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
- local cmd=$1
- local takesclass=$2
- local subcmdtext=${3:-"SUBCOMMAND"}
- local haveoptions
- local optstring
- local havesubs
- local subcmdstring
+ declare cmd=$1
+ declare takesclass=$2
+ declare subcmdtext=${3:-"SUBCOMMAND"}
+ declare haveoptions
+ declare optstring
+ declare havesubs
+ declare subcmdstring
+ declare cmdtype
cmd=${cmd##*/}
@@ -358,7 +472,7 @@ function hadoop_generate_usage
echo " OPTIONS is none or any of:"
echo ""
- hadoop_generic_columnprinter "${HADOOP_OPTION_USAGE[@]}"
+ hadoop_generic_columnprinter "" "${HADOOP_OPTION_USAGE[@]}"
fi
if [[ "${havesubs}" = true ]]; then
@@ -366,7 +480,18 @@ function hadoop_generate_usage
echo " ${subcmdtext} is one of:"
echo ""
- hadoop_generic_columnprinter "${HADOOP_SUBCMD_USAGE[@]}"
+ if [[ "${#HADOOP_SUBCMD_USAGE_TYPES[@]}" -gt 0 ]]; then
+
+ hadoop_sort_array HADOOP_SUBCMD_USAGE_TYPES
+ for subtype in "${HADOOP_SUBCMD_USAGE_TYPES[@]}"; do
+ #shellcheck disable=SC2086
+ cmdtype="$(tr '[:lower:]' '[:upper:]' <<< ${subtype:0:1})${subtype:1}"
+ printf "\n %s Commands:\n\n" "${cmdtype}"
+ hadoop_generic_columnprinter "${subtype}" "${HADOOP_SUBCMD_USAGE[@]}"
+ done
+ else
+ hadoop_generic_columnprinter "" "${HADOOP_SUBCMD_USAGE[@]}"
+ fi
echo ""
echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
fi
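The net effect on the `hadoop`, `hdfs`, `mapred`, and `yarn` usage output is one section per registered type, sorted alphabetically; roughly like the following (entries and spacing illustrative):

```
    Admin Commands:

daemonlog     get/set the log level for each daemon

    Client Commands:

fs            run a generic filesystem user client
version       print the version

    Daemon Commands:

kms           run KMS, the Key Management Server
```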
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
index 97f9e9a..ffe2aec 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
@@ -180,11 +180,11 @@ It is also possible to add the new subcommands to the usage output. The `hadoop_
```bash
if [[ "${HADOOP_SHELL_EXECNAME}" = "yarn" ]]; then
- hadoop_add_subcommand "hello" "Print some text to the screen"
+ hadoop_add_subcommand "hello" client "Print some text to the screen"
fi
```
-This functionality may also be use to override the built-ins. For example, defining:
+We set the subcommand type to "client" since there are no special restrictions, extra capabilities, etc. This functionality may also be used to override the built-ins. For example, defining:
```bash
function hdfs_subcommand_fetchdt
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_array_param.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_array_param.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_array_param.bats
new file mode 100644
index 0000000..03264c1
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_array_param.bats
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_array_param (empty)" {
+ hadoop_add_array_param ARRAY value
+ [ "${ARRAY[0]}" = value ]
+}
+
+@test "hadoop_add_array_param (exist)" {
+ ARRAY=("val2")
+ hadoop_add_array_param ARRAY val1
+ [ "${ARRAY[0]}" = val2 ]
+ [ "${ARRAY[1]}" = val1 ]
+}
+
+@test "hadoop_add_array_param (double exist)" {
+ ARRAY=("val2" "val1")
+ hadoop_add_array_param ARRAY val3
+ [ "${ARRAY[0]}" = val2 ]
+ [ "${ARRAY[1]}" = val1 ]
+ [ "${ARRAY[2]}" = val3 ]
+}
+
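Like the other shell tests, this and the two bats files below can presumably be run directly with the bats framework, e.g. from hadoop-common/src/test/scripts (hypothetical invocation):

```bash
bats hadoop_add_array_param.bats
```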
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_array_contains.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_array_contains.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_array_contains.bats
new file mode 100644
index 0000000..01cb4e3
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_array_contains.bats
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_array_contains (empty)" {
+ run hadoop_array_contains value "${ARRAY[@]}"
+ [ "${status}" = 1 ]
+}
+
+@test "hadoop_array_contains (exist)" {
+ ARRAY=("value")
+ run hadoop_array_contains value "${ARRAY[@]}"
+ [ "${status}" = 0 ]
+}
+
+@test "hadoop_array_contains (notexist)" {
+ ARRAY=("different")
+ run hadoop_array_contains value "${ARRAY[@]}"
+ [ "${status}" = 1 ]
+}
+
+@test "hadoop_array_contains (exist, multi)" {
+ ARRAY=("val1" "val2" "val3")
+ for j in val1 val2 val3; do
+ run hadoop_array_contains "${j}" "${ARRAY[@]}"
+ [ "${status}" = 0 ]
+ done
+}
+
+@test "hadoop_array_contains (multi, not exist)" {
+ ARRAY=("val1" "val2" "val3")
+ run hadoop_array_contains value "${ARRAY[@]}"
+ [ "${status}" = 1 ]
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_sort_array.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_sort_array.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_sort_array.bats
new file mode 100644
index 0000000..7a18b5d
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_sort_array.bats
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_sort_array (empty)" {
+ hadoop_sort_array ARRAY
+}
+
+@test "hadoop_sort_array (single value)" {
+ ARRAY=("value")
+ hadoop_sort_array ARRAY
+}
+
+@test "hadoop_sort_array (multiple value)" {
+ ARRAY=("b" "c" "a")
+ preifsod=$(echo "${IFS}" | od -c)
+ hadoop_sort_array ARRAY
+ postifsod=$(echo "${IFS}" | od -c)
+
+ [ "${ARRAY[0]}" = "a" ]
+ [ "${ARRAY[1]}" = "b" ]
+ [ "${ARRAY[2]}" = "c" ]
+ [ "${preifsod}" = "${postifsod}" ]
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
index c530716..0d084bb 100755
--- a/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
@@ -16,7 +16,7 @@
# limitations under the License.
if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
- hadoop_add_subcommand "kms" "run KMS, the Key Management Server"
+ hadoop_add_subcommand "kms" daemon "run KMS, the Key Management Server"
fi
## @description Command handler for kms subcommand
@@ -54,4 +54,4 @@ function hadoop_subcommand_kms
[[ "${HADOOP_DAEMON_MODE}" == "start" ]]; then
hadoop_mkdir "${KMS_TEMP:-${HADOOP_HOME}/temp}"
fi
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh
index 6301e27..85cbc66 100755
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh
@@ -16,7 +16,7 @@
# limitations under the License.
if [[ "${HADOOP_SHELL_EXECNAME}" = hdfs ]]; then
- hadoop_add_subcommand "httpfs" "run HttpFS server, the HDFS HTTP Gateway"
+ hadoop_add_subcommand "httpfs" daemon "run HttpFS server, the HDFS HTTP Gateway"
fi
## @description Command handler for httpfs subcommand
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index 594a468..e6405b5 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -31,37 +31,37 @@ function hadoop_usage
hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
hadoop_add_option "--workers" "turn on worker mode"
- hadoop_add_subcommand "balancer" "run a cluster balancing utility"
- hadoop_add_subcommand "cacheadmin" "configure the HDFS cache"
- hadoop_add_subcommand "classpath" "prints the class path needed to get the hadoop jar and the required libraries"
- hadoop_add_subcommand "crypto" "configure HDFS encryption zones"
- hadoop_add_subcommand "datanode" "run a DFS datanode"
- hadoop_add_subcommand "debug" "run a Debug Admin to execute HDFS debug commands"
- hadoop_add_subcommand "dfs" "run a filesystem command on the file system"
- hadoop_add_subcommand "dfsadmin" "run a DFS admin client"
- hadoop_add_subcommand "diskbalancer" "Distributes data evenly among disks on a given node"
- hadoop_add_subcommand "envvars" "display computed Hadoop environment variables"
- hadoop_add_subcommand "ec" "run a HDFS ErasureCoding CLI"
- hadoop_add_subcommand "fetchdt" "fetch a delegation token from the NameNode"
- hadoop_add_subcommand "fsck" "run a DFS filesystem checking utility"
- hadoop_add_subcommand "getconf" "get config values from configuration"
- hadoop_add_subcommand "groups" "get the groups which users belong to"
- hadoop_add_subcommand "haadmin" "run a DFS HA admin client"
- hadoop_add_subcommand "jmxget" "get JMX exported values from NameNode or DataNode."
- hadoop_add_subcommand "journalnode" "run the DFS journalnode"
- hadoop_add_subcommand "lsSnapshottableDir" "list all snapshottable dirs owned by the current user"
- hadoop_add_subcommand "mover" "run a utility to move block replicas across storage types"
- hadoop_add_subcommand "namenode" "run the DFS namenode"
- hadoop_add_subcommand "nfs3" "run an NFS version 3 gateway"
- hadoop_add_subcommand "oev" "apply the offline edits viewer to an edits file"
- hadoop_add_subcommand "oiv" "apply the offline fsimage viewer to an fsimage"
- hadoop_add_subcommand "oiv_legacy" "apply the offline fsimage viewer to a legacy fsimage"
- hadoop_add_subcommand "portmap" "run a portmap service"
- hadoop_add_subcommand "secondarynamenode" "run the DFS secondary namenode"
- hadoop_add_subcommand "snapshotDiff" "diff two snapshots of a directory or diff the current directory contents with a snapshot"
- hadoop_add_subcommand "storagepolicies" "list/get/set block storage policies"
- hadoop_add_subcommand "version" "print the version"
- hadoop_add_subcommand "zkfc" "run the ZK Failover Controller daemon"
+ hadoop_add_subcommand "balancer" daemon "run a cluster balancing utility"
+ hadoop_add_subcommand "cacheadmin" admin "configure the HDFS cache"
+ hadoop_add_subcommand "classpath" client "prints the class path needed to get the hadoop jar and the required libraries"
+ hadoop_add_subcommand "crypto" admin "configure HDFS encryption zones"
+ hadoop_add_subcommand "datanode" daemon "run a DFS datanode"
+ hadoop_add_subcommand "debug" admin "run a Debug Admin to execute HDFS debug commands"
+ hadoop_add_subcommand "dfs" client "run a filesystem command on the file system"
+ hadoop_add_subcommand "dfsadmin" admin "run a DFS admin client"
+ hadoop_add_subcommand "diskbalancer" daemon "Distributes data evenly among disks on a given node"
+ hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
+ hadoop_add_subcommand "ec" admin "run a HDFS ErasureCoding CLI"
+ hadoop_add_subcommand "fetchdt" client "fetch a delegation token from the NameNode"
+ hadoop_add_subcommand "fsck" admin "run a DFS filesystem checking utility"
+ hadoop_add_subcommand "getconf" client "get config values from configuration"
+ hadoop_add_subcommand "groups" client "get the groups which users belong to"
+ hadoop_add_subcommand "haadmin" admin "run a DFS HA admin client"
+ hadoop_add_subcommand "jmxget" admin "get JMX exported values from NameNode or DataNode."
+ hadoop_add_subcommand "journalnode" daemon "run the DFS journalnode"
+ hadoop_add_subcommand "lsSnapshottableDir" client "list all snapshottable dirs owned by the current user"
+ hadoop_add_subcommand "mover" daemon "run a utility to move block replicas across storage types"
+ hadoop_add_subcommand "namenode" daemon "run the DFS namenode"
+ hadoop_add_subcommand "nfs3" daemon "run an NFS version 3 gateway"
+ hadoop_add_subcommand "oev" admin "apply the offline edits viewer to an edits file"
+ hadoop_add_subcommand "oiv" admin "apply the offline fsimage viewer to an fsimage"
+ hadoop_add_subcommand "oiv_legacy" admin "apply the offline fsimage viewer to a legacy fsimage"
+ hadoop_add_subcommand "portmap" daemon "run a portmap service"
+ hadoop_add_subcommand "secondarynamenode" daemon "run the DFS secondary namenode"
+ hadoop_add_subcommand "snapshotDiff" client "diff two snapshots of a directory or diff the current directory contents with a snapshot"
+ hadoop_add_subcommand "storagepolicies" admin "list/get/set block storage policies"
+ hadoop_add_subcommand "version" client "print the version"
+ hadoop_add_subcommand "zkfc" daemon "run the ZK Failover Controller daemon"
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" false
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-mapreduce-project/bin/mapred
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/bin/mapred b/hadoop-mapreduce-project/bin/mapred
index cf17aea..f66f563 100755
--- a/hadoop-mapreduce-project/bin/mapred
+++ b/hadoop-mapreduce-project/bin/mapred
@@ -24,15 +24,15 @@ HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
## @replaceable no
function hadoop_usage
{
- hadoop_add_subcommand "classpath" "prints the class path needed for running mapreduce subcommands"
- hadoop_add_subcommand "envvars" "display computed Hadoop environment variables"
- hadoop_add_subcommand "historyserver" "run job history servers as a standalone daemon"
- hadoop_add_subcommand "hsadmin" "job history server admin interface"
- hadoop_add_subcommand "job" "manipulate MapReduce jobs"
- hadoop_add_subcommand "pipes" "run a Pipes job"
- hadoop_add_subcommand "queue" "get information regarding JobQueues"
- hadoop_add_subcommand "sampler" "sampler"
- hadoop_add_subcommand "version" "print the version"
+ hadoop_add_subcommand "classpath" client "prints the class path needed for running mapreduce subcommands"
+ hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
+ hadoop_add_subcommand "historyserver" daemon "run job history servers as a standalone daemon"
+ hadoop_add_subcommand "hsadmin" admin "job history server admin interface"
+ hadoop_add_subcommand "job" client "manipulate MapReduce jobs"
+ hadoop_add_subcommand "pipes" client "run a Pipes job"
+ hadoop_add_subcommand "queue" client "get information regarding JobQueues"
+ hadoop_add_subcommand "sampler" client "sampler"
+ hadoop_add_subcommand "version" client "print the version"
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh b/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
index c889816..278a089 100755
--- a/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
+++ b/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
@@ -18,7 +18,7 @@
if ! declare -f mapred_subcommand_archive-logs >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
- hadoop_add_subcommand "archive-logs" "combine aggregated logs into hadoop archives"
+ hadoop_add_subcommand "archive-logs" client "combine aggregated logs into hadoop archives"
fi
# this can't be indented otherwise shelldocs won't get it
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh b/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
index f74fe5b..42fc1a0 100755
--- a/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
+++ b/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
@@ -18,7 +18,7 @@
if ! declare -f hadoop_subcommand_archive >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
- hadoop_add_subcommand "archive" "create a Hadoop archive"
+ hadoop_add_subcommand "archive" client "create a Hadoop archive"
fi
# this can't be indented otherwise shelldocs won't get it
@@ -39,7 +39,7 @@ fi
if ! declare -f mapred_subcommand_archive >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
- hadoop_add_subcommand "archive" "create a Hadoop archive"
+ hadoop_add_subcommand "archive" client "create a Hadoop archive"
fi
# this can't be indented otherwise shelldocs won't get it
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-tools/hadoop-distcp/src/main/shellprofile.d/hadoop-distcp.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/shellprofile.d/hadoop-distcp.sh b/hadoop-tools/hadoop-distcp/src/main/shellprofile.d/hadoop-distcp.sh
index 6e93ec1..4502848 100755
--- a/hadoop-tools/hadoop-distcp/src/main/shellprofile.d/hadoop-distcp.sh
+++ b/hadoop-tools/hadoop-distcp/src/main/shellprofile.d/hadoop-distcp.sh
@@ -18,7 +18,7 @@
if ! declare -f hadoop_subcommand_distcp >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
- hadoop_add_subcommand "distcp" "copy file or directories recursively"
+ hadoop_add_subcommand "distcp" client "copy file or directories recursively"
fi
# this can't be indented otherwise shelldocs won't get it
@@ -39,7 +39,7 @@ fi
if ! declare -f mapred_subcommand_distcp >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
- hadoop_add_subcommand "distcp" "copy file or directories recursively"
+ hadoop_add_subcommand "distcp" client "copy file or directories recursively"
fi
# this can't be indented otherwise shelldocs won't get it
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-tools/hadoop-extras/src/main/shellprofile.d/hadoop-extras.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/shellprofile.d/hadoop-extras.sh b/hadoop-tools/hadoop-extras/src/main/shellprofile.d/hadoop-extras.sh
index 1ce9aee..364c950 100755
--- a/hadoop-tools/hadoop-extras/src/main/shellprofile.d/hadoop-extras.sh
+++ b/hadoop-tools/hadoop-extras/src/main/shellprofile.d/hadoop-extras.sh
@@ -18,7 +18,7 @@
if ! declare -f hadoop_subcommand_distch >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
- hadoop_add_subcommand "distch" "distributed metadata changer"
+ hadoop_add_subcommand "distch" client "distributed metadata changer"
fi
# this can't be indented otherwise shelldocs won't get it
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh b/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh
index b7887ba..55997d0 100755
--- a/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh
+++ b/hadoop-tools/hadoop-gridmix/src/main/shellprofile.d/hadoop-gridmix.sh
@@ -18,7 +18,7 @@
if ! declare -f hadoop_subcommand_gridmix >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
- hadoop_add_subcommand "gridmix" "submit a mix of synthetic job, modeling a profiled from production load"
+ hadoop_add_subcommand "gridmix" client "submit a mix of synthetic job, modeling a profiled from production load"
fi
## @description gridmix command for hadoop
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh b/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh
index 77023ff..b0d606d 100755
--- a/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh
+++ b/hadoop-tools/hadoop-rumen/src/main/shellprofile.d/hadoop-rumen.sh
@@ -18,7 +18,7 @@
if ! declare -f hadoop_subcommand_rumenfolder >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
- hadoop_add_subcommand "rumenfolder" "scale a rumen input trace"
+ hadoop_add_subcommand "rumenfolder" client "scale a rumen input trace"
fi
## @description rumenfolder command for hadoop
@@ -37,7 +37,7 @@ fi
if ! declare -f hadoop_subcommand_rumentrace >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
- hadoop_add_subcommand "rumentrace" "convert logs into a rumen trace"
+ hadoop_add_subcommand "rumentrace" client "convert logs into a rumen trace"
fi
## @description rumentrace command for hadoop
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-tools/hadoop-streaming/src/main/shellprofile.d/hadoop-streaming.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-streaming/src/main/shellprofile.d/hadoop-streaming.sh b/hadoop-tools/hadoop-streaming/src/main/shellprofile.d/hadoop-streaming.sh
index c3010ff..be76b06 100755
--- a/hadoop-tools/hadoop-streaming/src/main/shellprofile.d/hadoop-streaming.sh
+++ b/hadoop-tools/hadoop-streaming/src/main/shellprofile.d/hadoop-streaming.sh
@@ -18,7 +18,7 @@
if ! declare -f mapred_subcommand_streaming >/dev/null 2>/dev/null; then
if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
- hadoop_add_subcommand "streaming" "launch a mapreduce streaming job"
+ hadoop_add_subcommand "streaming" client "launch a mapreduce streaming job"
fi
## @description streaming command for mapred
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1bf6b7/hadoop-yarn-project/hadoop-yarn/bin/yarn
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index a37d183..dcde0dc 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -31,28 +31,28 @@ function hadoop_usage
hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
hadoop_add_option "--workers" "turn on worker mode"
- hadoop_add_subcommand "application" "prints application(s) report/kill application"
- hadoop_add_subcommand "applicationattempt" "prints applicationattempt(s) report"
- hadoop_add_subcommand "classpath" "prints the class path needed to get the hadoop jar and the required libraries"
- hadoop_add_subcommand "cluster" "prints cluster information"
- hadoop_add_subcommand "container" "prints container(s) report"
- hadoop_add_subcommand "daemonlog" "get/set the log level for each daemon"
- hadoop_add_subcommand "envvars" "display computed Hadoop environment variables"
- hadoop_add_subcommand "jar <jar>" "run a jar file"
- hadoop_add_subcommand "logs" "dump container logs"
- hadoop_add_subcommand "node" "prints node report(s)"
- hadoop_add_subcommand "nodemanager" "run a nodemanager on each worker"
- hadoop_add_subcommand "proxyserver" "run the web app proxy server"
- hadoop_add_subcommand "queue" "prints queue information"
- hadoop_add_subcommand "resourcemanager" "run the ResourceManager"
- hadoop_add_subcommand "rmadmin" "admin tools"
- hadoop_add_subcommand "router" "run the Router daemon"
- hadoop_add_subcommand "scmadmin" "SharedCacheManager admin tools"
- hadoop_add_subcommand "sharedcachemanager" "run the SharedCacheManager daemon"
- hadoop_add_subcommand "timelinereader" "run the timeline reader server"
- hadoop_add_subcommand "timelineserver" "run the timeline server"
- hadoop_add_subcommand "top" "view cluster information"
- hadoop_add_subcommand "version" "print the version"
+ hadoop_add_subcommand "application" client "prints application(s) report/kill application"
+ hadoop_add_subcommand "applicationattempt" client "prints applicationattempt(s) report"
+ hadoop_add_subcommand "classpath" client "prints the class path needed to get the hadoop jar and the required libraries"
+ hadoop_add_subcommand "cluster" client "prints cluster information"
+ hadoop_add_subcommand "container" client "prints container(s) report"
+ hadoop_add_subcommand "daemonlog" admin "get/set the log level for each daemon"
+ hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
+ hadoop_add_subcommand "jar <jar>" client "run a jar file"
+ hadoop_add_subcommand "logs" client "dump container logs"
+ hadoop_add_subcommand "node" admin "prints node report(s)"
+ hadoop_add_subcommand "nodemanager" daemon "run a nodemanager on each worker"
+ hadoop_add_subcommand "proxyserver" daemon "run the web app proxy server"
+ hadoop_add_subcommand "queue" client "prints queue information"
+ hadoop_add_subcommand "resourcemanager" daemon "run the ResourceManager"
+ hadoop_add_subcommand "rmadmin" admin "admin tools"
+ hadoop_add_subcommand "router" daemon "run the Router daemon"
+ hadoop_add_subcommand "scmadmin" admin "SharedCacheManager admin tools"
+ hadoop_add_subcommand "sharedcachemanager" admin "run the SharedCacheManager daemon"
+ hadoop_add_subcommand "timelinereader" client "run the timeline reader server"
+ hadoop_add_subcommand "timelineserver" daemon "run the timeline server"
+ hadoop_add_subcommand "top" client "view cluster information"
+ hadoop_add_subcommand "version" client "print the version"
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}