hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From a.@apache.org
Subject [16/31] hadoop git commit: HADOOP-12931. bin/hadoop work for dynamic subcommands
Date Sun, 15 May 2016 14:50:56 GMT
HADOOP-12931. bin/hadoop work for dynamic subcommands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7033b1b1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7033b1b1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7033b1b1

Branch: refs/heads/HADOOP-12930
Commit: 7033b1b14f1a067aca708da77d2a9a45918b77cf
Parents: a9e2ec4
Author: Allen Wittenauer <aw@apache.org>
Authored: Tue May 3 10:49:46 2016 -0700
Committer: Allen Wittenauer <aw@apache.org>
Committed: Sun May 15 07:50:15 2016 -0700

----------------------------------------------------------------------
 .../hadoop-common/src/main/bin/hadoop           | 274 ++++++++++---------
 1 file changed, 148 insertions(+), 126 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7033b1b1/hadoop-common-project/hadoop-common/src/main/bin/hadoop
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index fccb9f8..453ac83 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -16,7 +16,12 @@
 # limitations under the License.
 
 MYNAME="${BASH_SOURCE-$0}"
+HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
 
+## @description  build up the hadoop command's usage text.
+## @audience     public
+## @stability    stable
+## @replaceable  no
 function hadoop_usage
 {
   hadoop_add_option "buildpaths" "attempt to add class files from build tree"
@@ -43,7 +48,140 @@ function hadoop_usage
   hadoop_add_subcommand "key" "manage keys via the KeyProvider"
   hadoop_add_subcommand "trace" "view and modify Hadoop tracing settings"
   hadoop_add_subcommand "version" "print the version"
-  hadoop_generate_usage "${MYNAME}" true
+  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
+}
+
+## @description  Default command handler for hadoop command
+## @audience     public
+## @stability    stable
+## @replaceable  no
+## @param        CLI arguments
+function hadoopcmd_case
+{
+  subcmd=$1
+  shift
+
+  case ${subcmd} in
+    balancer|datanode|dfs|dfsadmin|dfsgroups|  \
+    namenode|secondarynamenode|fsck|fetchdt|oiv| \
+    portmap|nfs3)
+      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
+      subcmd=${subcmd/dfsgroups/groups}
+      hadoop_error "WARNING: Attempting to execute replacement \"hdfs ${subcmd}\" instead."
+      hadoop_error ""
+      #try to locate hdfs and if present, delegate to it.
+      if [[ -f "${HADOOP_HDFS_HOME}/bin/hdfs" ]]; then
+        # shellcheck disable=SC2086
+        exec "${HADOOP_HDFS_HOME}/bin/hdfs" \
+        --config "${HADOOP_CONF_DIR}" "${subcmd}"  "$@"
+      elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then
+        # shellcheck disable=SC2086
+        exec "${HADOOP_HOME}/bin/hdfs" \
+        --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
+      else
+        hadoop_error "HADOOP_HDFS_HOME not found!"
+        exit 1
+      fi
+    ;;
+
+    #mapred commands for backwards compatibility
+    pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
+      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
+      subcmd=${subcmd/mrgroups/groups}
+      hadoop_error "WARNING: Attempting to execute replacement \"mapred ${subcmd}\" instead."
+      hadoop_error ""
+      #try to locate mapred and if present, delegate to it.
+      if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then
+        exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
+        --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
+      elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then
+        exec "${HADOOP_HOME}/bin/mapred" \
+        --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
+      else
+        hadoop_error "HADOOP_MAPRED_HOME not found!"
+        exit 1
+      fi
+    ;;
+    archive)
+      HADOOP_CLASS=org.apache.hadoop.tools.HadoopArchives
+      hadoop_add_to_classpath_tools hadoop-archives
+    ;;
+    checknative)
+      HADOOP_CLASS=org.apache.hadoop.util.NativeLibraryChecker
+    ;;
+    classpath)
+      hadoop_do_classpath_subcommand HADOOP_CLASS "$@"
+    ;;
+    conftest)
+      HADOOP_CLASS=org.apache.hadoop.util.ConfTest
+    ;;
+    credential)
+      HADOOP_CLASS=org.apache.hadoop.security.alias.CredentialShell
+    ;;
+    daemonlog)
+      HADOOP_CLASS=org.apache.hadoop.log.LogLevel
+    ;;
+    distch)
+      HADOOP_CLASS=org.apache.hadoop.tools.DistCh
+      hadoop_add_to_classpath_tools hadoop-extras
+    ;;
+    distcp)
+      HADOOP_CLASS=org.apache.hadoop.tools.DistCp
+      hadoop_add_to_classpath_tools hadoop-distcp
+    ;;
+    dtutil)
+      HADOOP_CLASS=org.apache.hadoop.security.token.DtUtilShell
+    ;;
+    envvars)
+      echo "JAVA_HOME='${JAVA_HOME}'"
+      echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"
+      echo "HADOOP_COMMON_DIR='${HADOOP_COMMON_DIR}'"
+      echo "HADOOP_COMMON_LIB_JARS_DIR='${HADOOP_COMMON_LIB_JARS_DIR}'"
+      echo "HADOOP_COMMON_LIB_NATIVE_DIR='${HADOOP_COMMON_LIB_NATIVE_DIR}'"
+      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
+      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
+      exit 0
+    ;;
+    fs)
+      HADOOP_CLASS=org.apache.hadoop.fs.FsShell
+    ;;
+    gridmix)
+      HADOOP_CLASS=org.apache.hadoop.mapred.gridmix.Gridmix
+      hadoop_add_to_classpath_tools hadoop-rumen
+      hadoop_add_to_classpath_tools hadoop-gridmix
+    ;;
+    jar)
+      if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
+        hadoop_error "WARNING: Use \"yarn jar\" to launch YARN applications."
+      fi
+      HADOOP_CLASS=org.apache.hadoop.util.RunJar
+    ;;
+    jnipath)
+      hadoop_finalize
+      echo "${JAVA_LIBRARY_PATH}"
+      exit 0
+    ;;
+    kerbname)
+      HADOOP_CLASS=org.apache.hadoop.security.HadoopKerberosName
+    ;;
+    key)
+      HADOOP_CLASS=org.apache.hadoop.crypto.key.KeyShell
+    ;;
+    trace)
+      HADOOP_CLASS=org.apache.hadoop.tracing.TraceAdmin
+    ;;
+    version)
+      HADOOP_CLASS=org.apache.hadoop.util.VersionInfo
+    ;;
+    *)
+      HADOOP_CLASS="${subcmd}"
+      if ! hadoop_validate_classname "${HADOOP_CLASS}"; then
+        hadoop_exit_with_usage 1
+      fi
+    ;;
+  esac
 }
 
 # This script runs the hadoop core commands.
@@ -70,132 +208,16 @@ if [ $# = 0 ]; then
   hadoop_exit_with_usage 1
 fi
 
-COMMAND=$1
+HADOOP_SUBCMD=$1
 shift
 
-case ${COMMAND} in
-  balancer|datanode|dfs|dfsadmin|dfsgroups|  \
-  namenode|secondarynamenode|fsck|fetchdt|oiv| \
-  portmap|nfs3)
-    hadoop_error "WARNING: Use of this script to execute ${COMMAND} is deprecated."
-    COMMAND=${COMMAND/dfsgroups/groups}
-    hadoop_error "WARNING: Attempting to execute replacement \"hdfs ${COMMAND}\" instead."
-    hadoop_error ""
-    #try to locate hdfs and if present, delegate to it.
-    if [[ -f "${HADOOP_HDFS_HOME}/bin/hdfs" ]]; then
-      # shellcheck disable=SC2086
-      exec "${HADOOP_HDFS_HOME}/bin/hdfs" \
-      --config "${HADOOP_CONF_DIR}" "${COMMAND}"  "$@"
-    elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then
-      # shellcheck disable=SC2086
-      exec "${HADOOP_HOME}/bin/hdfs" \
-      --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
-    else
-      hadoop_error "HADOOP_HDFS_HOME not found!"
-      exit 1
-    fi
-  ;;
-
-  #mapred commands for backwards compatibility
-  pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
-    hadoop_error "WARNING: Use of this script to execute ${COMMAND} is deprecated."
-    COMMAND=${COMMAND/mrgroups/groups}
-    hadoop_error "WARNING: Attempting to execute replacement \"mapred ${COMMAND}\" instead."
-    hadoop_error ""
-    #try to locate mapred and if present, delegate to it.
-    if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then
-      exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
-      --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
-    elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then
-      exec "${HADOOP_HOME}/bin/mapred" \
-      --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
-    else
-      hadoop_error "HADOOP_MAPRED_HOME not found!"
-      exit 1
-    fi
-  ;;
-  archive)
-    CLASS=org.apache.hadoop.tools.HadoopArchives
-    hadoop_add_to_classpath_tools hadoop-archives
-  ;;
-  checknative)
-    CLASS=org.apache.hadoop.util.NativeLibraryChecker
-  ;;
-  classpath)
-    hadoop_do_classpath_subcommand CLASS "$@"
-  ;;
-  conftest)
-    CLASS=org.apache.hadoop.util.ConfTest
-  ;;
-  credential)
-    CLASS=org.apache.hadoop.security.alias.CredentialShell
-  ;;
-  daemonlog)
-    CLASS=org.apache.hadoop.log.LogLevel
-  ;;
-  distch)
-    CLASS=org.apache.hadoop.tools.DistCh
-    hadoop_add_to_classpath_tools hadoop-extras
-  ;;
-  distcp)
-    CLASS=org.apache.hadoop.tools.DistCp
-    hadoop_add_to_classpath_tools hadoop-distcp
-  ;;
-  dtutil)
-    CLASS=org.apache.hadoop.security.token.DtUtilShell
-  ;;
-  envvars)
-    echo "JAVA_HOME='${JAVA_HOME}'"
-    echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"
-    echo "HADOOP_COMMON_DIR='${HADOOP_COMMON_DIR}'"
-    echo "HADOOP_COMMON_LIB_JARS_DIR='${HADOOP_COMMON_LIB_JARS_DIR}'"
-    echo "HADOOP_COMMON_LIB_NATIVE_DIR='${HADOOP_COMMON_LIB_NATIVE_DIR}'"
-    echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
-    echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
-    echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
-    echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
-    exit 0
-  ;;
-  fs)
-    CLASS=org.apache.hadoop.fs.FsShell
-  ;;
-  gridmix)
-    CLASS=org.apache.hadoop.mapred.gridmix.Gridmix
-    hadoop_add_to_classpath_tools hadoop-rumen
-    hadoop_add_to_classpath_tools hadoop-gridmix
-  ;;
-  jar)
-    if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
-      hadoop_error "WARNING: Use \"yarn jar\" to launch YARN applications."
-    fi
-    CLASS=org.apache.hadoop.util.RunJar
-  ;;
-  jnipath)
-    hadoop_finalize
-    echo "${JAVA_LIBRARY_PATH}"
-    exit 0
-  ;;
-  kerbname)
-    CLASS=org.apache.hadoop.security.HadoopKerberosName
-  ;;
-  key)
-    CLASS=org.apache.hadoop.crypto.key.KeyShell
-  ;;
-  trace)
-    CLASS=org.apache.hadoop.tracing.TraceAdmin
-  ;;
-  version)
-    CLASS=org.apache.hadoop.util.VersionInfo
-  ;;
-  *)
-    CLASS="${COMMAND}"
-    if ! hadoop_validate_classname "${CLASS}"; then
-      hadoop_exit_with_usage 1
-    fi
-  ;;
-esac
-
-hadoop_verify_user "${COMMAND}"
+if declare -f hadoop_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
+  "hadoop_subcommand_${HADOOP_SUBCMD}" "$@"
+else
+  hadoopcmd_case "${HADOOP_SUBCMD}" "$@"
+fi
+
+hadoop_verify_user "${HADOOP_SUBCMD}"
 
 if [[ ${HADOOP_SLAVE_MODE} = true ]]; then
   hadoop_common_slave_mode_execute "${HADOOP_HDFS_HOME}/bin/hdfs" "${HADOOP_USER_PARAMS[@]}"
@@ -207,5 +229,5 @@ hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
 HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 
 hadoop_finalize
-hadoop_java_exec "${COMMAND}" "${CLASS}" "$@"
+hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASS}" "$@"
 


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message