hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From a.@apache.org
Subject [08/10] hadoop git commit: HADOOP-10854. unit tests for the shell scripts (aw)
Date Fri, 31 Jul 2015 21:53:48 GMT
HADOOP-10854. unit tests for the shell scripts (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a890a315
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a890a315
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a890a315

Branch: refs/heads/HADOOP-12111
Commit: a890a31529cc625326cd3749a4960ad7c02fc6fe
Parents: 666cafc
Author: Allen Wittenauer <aw@apache.org>
Authored: Fri Jul 31 14:34:48 2015 -0700
Committer: Allen Wittenauer <aw@apache.org>
Committed: Fri Jul 31 14:34:48 2015 -0700

----------------------------------------------------------------------
 BUILDING.txt                                    |   4 +-
 dev-support/docker/Dockerfile                   |   8 +
 hadoop-common-project/hadoop-common/CHANGES.txt |   2 +
 hadoop-common-project/hadoop-common/pom.xml     |  33 +++
 .../src/main/bin/hadoop-functions.sh            | 114 +++++++---
 .../scripts/hadoop-functions_test_helper.bash   |  56 +++++
 .../src/test/scripts/hadoop_add_classpath.bats  | 100 +++++++++
 .../src/test/scripts/hadoop_add_colonpath.bats  |  96 +++++++++
 .../scripts/hadoop_add_common_to_classpath.bats |  71 +++++++
 .../test/scripts/hadoop_add_javalibpath.bats    |  98 +++++++++
 .../src/test/scripts/hadoop_add_ldlibpath.bats  |  97 +++++++++
 .../src/test/scripts/hadoop_add_param.bats      |  49 +++++
 .../hadoop_add_to_classpath_userpath.bats       |  98 +++++++++
 .../src/test/scripts/hadoop_basic_init.bats     |  94 +++++++++
 .../src/test/scripts/hadoop_bootstrap.bats      |  51 +++++
 .../src/test/scripts/hadoop_confdir.bats        |  92 +++++++++
 .../test/scripts/hadoop_deprecate_envvar.bats   |  32 +++
 .../src/test/scripts/hadoop_finalize.bats       | 206 +++++++++++++++++++
 .../scripts/hadoop_finalize_catalina_opts.bats  |  56 +++++
 .../test/scripts/hadoop_finalize_classpath.bats |  64 ++++++
 .../scripts/hadoop_finalize_hadoop_heap.bats    |  87 ++++++++
 .../scripts/hadoop_finalize_hadoop_opts.bats    |  52 +++++
 .../test/scripts/hadoop_finalize_libpaths.bats  |  30 +++
 .../src/test/scripts/hadoop_java_setup.bats     |  47 +++++
 .../src/test/scripts/hadoop_os_tricks.bats      |  34 +++
 .../src/test/scripts/hadoop_rotate_log.bats     |  52 +++++
 .../src/test/scripts/hadoop_shellprofile.bats   |  91 ++++++++
 .../src/test/scripts/hadoop_slaves.bats         |  37 ++++
 .../src/test/scripts/hadoop_ssh.bats            |  51 +++++
 .../scripts/hadoop_translate_cygwin_path.bats   |  48 +++++
 .../test/scripts/hadoop_validate_classname.bats |  26 +++
 .../hadoop-common/src/test/scripts/run-bats.sh  |  43 ++++
 32 files changed, 1988 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/BUILDING.txt
----------------------------------------------------------------------
diff --git a/BUILDING.txt b/BUILDING.txt
index 2aeade4..ee6e680 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -14,6 +14,8 @@ Requirements:
 * Jansson C XML parsing library ( if compiling libwebhdfs )
 * Linux FUSE (Filesystem in Userspace) version 2.6 or above ( if compiling fuse_dfs )
 * Internet connection for first build (to fetch all Maven and Hadoop dependencies)
+* python (for releasedocs)
+* bats (for shell code testing)
 
 ----------------------------------------------------------------------------------
 The easiest way to get an environment with all the appropriate tools is by means
@@ -106,7 +108,7 @@ Maven build goals:
 
  * Clean                     : mvn clean [-Preleasedocs]
  * Compile                   : mvn compile [-Pnative]
- * Run tests                 : mvn test [-Pnative]
+ * Run tests                 : mvn test [-Pnative] [-Pshelltest]
  * Create JAR                : mvn package
  * Run findbugs              : mvn compile findbugs:findbugs
  * Run checkstyle            : mvn compile checkstyle:checkstyle

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/dev-support/docker/Dockerfile
----------------------------------------------------------------------
diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile
index f761f8b..c8453cc 100644
--- a/dev-support/docker/Dockerfile
+++ b/dev-support/docker/Dockerfile
@@ -63,6 +63,14 @@ ENV FINDBUGS_HOME /opt/findbugs
 RUN apt-get install -y cabal-install
 RUN cabal update && cabal install shellcheck --global
 
+#####
+# bats
+#####
+
+RUN add-apt-repository ppa:duggan/bats --yes
+RUN apt-get update -qq
+RUN apt-get install -qq bats
+
 # Fixing the Apache commons / Maven dependency problem under Ubuntu:
 # See http://wiki.apache.org/commons/VfsProblems
 RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang.jar .

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8d0795b..5020e91 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -51,6 +51,8 @@ Trunk (Unreleased)
     HADOOP-7947. Validate XMLs if a relevant tool is available, when using
     scripts (Kengo Seki via aw)
 
+    HADOOP-10854. unit tests for the shell scripts (aw)
+
   IMPROVEMENTS
 
     HADOOP-11203. Allow ditscp to accept bandwitdh in fraction MegaBytes

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 6b1388a..282735d 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -958,6 +958,39 @@
       </build>
     </profile>
 
+    <!-- profile to test shell code -->
+    <profile>
+      <id>shelltest</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+                <execution>
+                    <id>common-test-bats-driver</id>
+                    <phase>process-test-classes</phase>
+                    <goals>
+                        <goal>run</goal>
+                    </goals>
+                    <configuration>
+                      <target>
+                          <exec dir="src/test/scripts"
+                           executable="bash"
+                           failonerror="true">
+                           <arg value="./run-bats.sh" />
+                         </exec>
+                      </target>
+                    </configuration>
+                </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
   </profiles>
 </project>
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index 5e2a2e8..b9b7919 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -358,6 +358,7 @@ function hadoop_import_shellprofiles
 
   if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
     files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
+    hadoop_debug "shellprofiles: ${files1[*]}"
   else
     hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
   fi
@@ -368,7 +369,8 @@ function hadoop_import_shellprofiles
 
   for i in "${files1[@]}" "${files2[@]}"
   do
-    if [[ -n "${i}" ]]; then
+    if [[ -n "${i}"
+      && -f "${i}" ]]; then
       hadoop_debug "Profiles: importing ${i}"
       . "${i}"
     fi
@@ -490,6 +492,26 @@ function hadoop_basic_init
     export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
   fi
 
+  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
+    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
+    exit 1
+  fi
+
+  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
+    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
+    exit 1
+  fi
+
+  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
+    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
+    exit 1
+  fi
+
+  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
+    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
+    exit 1
+  fi
+
   HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
   HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
   HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
@@ -670,7 +692,7 @@ function hadoop_common_slave_mode_execute
   # to prevent loops
   # Also remove --hostnames and --hosts along with arg values
   local argsSize=${#argv[@]};
-  for (( i = 0; i < $argsSize; i++ ))
+  for (( i = 0; i < argsSize; i++ ))
   do
     if [[ "${argv[$i]}" =~ ^--slaves$ ]]; then
       unset argv[$i]
@@ -681,6 +703,10 @@ function hadoop_common_slave_mode_execute
       unset argv[$i];
     fi
   done
+  if [[ ${QATESTMODE} = true ]]; then
+    echo "${argv[@]}"
+    return
+  fi
   hadoop_connect_to_hosts -- "${argv[@]}"
 }
 
@@ -727,8 +753,12 @@ function hadoop_add_param
   # delimited
   #
   if [[ ! ${!1} =~ $2 ]] ; then
-    # shellcheck disable=SC2086
-    eval $1="'${!1} $3'"
+    #shellcheck disable=SC2140
+    eval "$1"="'${!1} $3'"
+    if [[ ${!1:0:1} = ' ' ]]; then
+      #shellcheck disable=SC2140
+      eval "$1"="'${!1# }'"
+    fi
     hadoop_debug "$1 accepted $3"
   else
     hadoop_debug "$1 declined $3"
@@ -766,7 +796,8 @@ function hadoop_add_classpath
   # for wildcard at end, we can
   # at least check the dir exists
   if [[ $1 =~ ^.*\*$ ]]; then
-    local mp=$(dirname "$1")
+    local mp
+    mp=$(dirname "$1")
     if [[ ! -d "${mp}" ]]; then
       hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
       return 1
@@ -825,7 +856,7 @@ function hadoop_add_colonpath
       hadoop_debug "Prepend colonpath($1): $2"
     else
       # shellcheck disable=SC2086
-      eval $1+="'$2'"
+      eval $1+=":'$2'"
       hadoop_debug "Append colonpath($1): $2"
     fi
     return 0
@@ -864,11 +895,14 @@ function hadoop_add_javalibpath
 ## @return       1 = failure (doesn't exist or some other reason)
 function hadoop_add_ldlibpath
 {
+  local status
   # specialized function for a common use case
   hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
+  status=$?
 
   # note that we export this
   export LD_LIBRARY_PATH
+  return ${status}
 }
 
 ## @description  Add the common/core Hadoop components to the
@@ -876,21 +910,29 @@ function hadoop_add_ldlibpath
 ## @audience     private
 ## @stability    evolving
 ## @replaceable  yes
+## @returns      1 on failure, may exit
+## @returns      0 on success
 function hadoop_add_common_to_classpath
 {
   #
   # get all of the common jars+config in the path
   #
 
+  if [[ -z "${HADOOP_COMMON_HOME}"
+    || -z "${HADOOP_COMMON_DIR}"
+    || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
+    hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
+    hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
+    hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
+    hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
+    exit 1
+  fi
+
   # developers
   if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
     hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
   fi
 
-  if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
-    hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
-  fi
-
   hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
   hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
 }
@@ -909,27 +951,27 @@ function hadoop_add_to_classpath_userpath
   # set env-var HADOOP_USER_CLASSPATH_FIRST
   # we'll also dedupe it, because we're cool like that.
   #
-  local c
-  local array
-  local i
-  local j
-  let c=0
+  declare -a array
+  declare -i c=0
+  declare -i j
+  declare -i i
+  declare idx
 
   if [[ -n "${HADOOP_CLASSPATH}" ]]; then
     # I wonder if Java runs on VMS.
-    for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
-      array[$c]=$i
-      let c+=1
+    for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
+      array[${c}]=${idx}
+      ((c=c+1))
     done
-    let j=c-1
+    ((j=c-1))
 
     if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
       if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
-        for ((i=j; i>=0; i--)); do
+        for ((i=0; i<=j; i++)); do
           hadoop_add_classpath "${array[$i]}" after
         done
       else
-        for ((i=0; i<=j; i++)); do
+        for ((i=j; i>=0; i--)); do
           hadoop_add_classpath "${array[$i]}" before
         done
       fi
@@ -951,18 +993,32 @@ function hadoop_os_tricks
     Darwin)
       if [[ -z "${JAVA_HOME}" ]]; then
         if [[ -x /usr/libexec/java_home ]]; then
-          export JAVA_HOME="$(/usr/libexec/java_home)"
+          JAVA_HOME="$(/usr/libexec/java_home)"
+          export JAVA_HOME
         else
-          export JAVA_HOME=/Library/Java/Home
+          JAVA_HOME=/Library/Java/Home
+          export JAVA_HOME
         fi
       fi
     ;;
     Linux)
-      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
+
+      # Newer versions of glibc use an arena memory allocator that
+      # causes virtual # memory usage to explode. This interacts badly
+      # with the many threads that we use in Hadoop. Tune the variable
+      # down to prevent vmem explosion.
+      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
+      # we put this in QA test mode off so that non-Linux can test
+      if [[ "${QATESTMODE}" = true ]]; then
+        return
+      fi
 
       # NOTE! HADOOP_ALLOW_IPV6 is a developer hook.  We leave it
       # undocumented in hadoop-env.sh because we don't want users to
       # shoot themselves in the foot while devs make IPv6 work.
+
+      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
+
       if [[ -n "${bindv6only}" ]] &&
          [[ "${bindv6only}" -eq "1" ]] &&
          [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
@@ -971,11 +1027,6 @@ function hadoop_os_tricks
         hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
         exit 1
       fi
-      # Newer versions of glibc use an arena memory allocator that
-      # causes virtual # memory usage to explode. This interacts badly
-      # with the many threads that we use in Hadoop. Tune the variable
-      # down to prevent vmem explosion.
-      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
     ;;
     CYGWIN*)
       # Flag that we're running on Cygwin to trigger path translation later.
@@ -1019,7 +1070,7 @@ function hadoop_finalize_libpaths
   if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
     hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
     hadoop_add_param HADOOP_OPTS java.library.path \
-    "-Djava.library.path=${JAVA_LIBRARY_PATH}"
+      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
     export LD_LIBRARY_PATH
   fi
 }
@@ -1168,6 +1219,7 @@ function hadoop_exit_with_usage
   if [[ -z $exitcode ]]; then
     exitcode=1
   fi
+  # shellcheck disable=SC2034
   if declare -F hadoop_usage >/dev/null ; then
     hadoop_usage
   elif [[ -x /usr/bin/cowsay ]]; then
@@ -1464,6 +1516,7 @@ function hadoop_start_secure_daemon
   hadoop_rotate_log "${daemonoutfile}"
   hadoop_rotate_log "${daemonerrfile}"
 
+  # shellcheck disable=SC2153
   jsvc="${JSVC_HOME}/jsvc"
   if [[ ! -f "${jsvc}" ]]; then
     hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
@@ -1490,6 +1543,7 @@ function hadoop_start_secure_daemon
     hadoop_error "ERROR:  Cannot write ${daemonname} pid ${privpidfile}."
   fi
 
+  # shellcheck disable=SC2086
   exec "${jsvc}" \
     "-Dproc_${daemonname}" \
     -outfile "${daemonoutfile}" \

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
new file mode 100755
index 0000000..f718345
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
@@ -0,0 +1,56 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+setup() {
+
+  TMP=../../../target/test-dir/bats.$$.${RANDOM}
+  mkdir -p ${TMP}
+  TMP=$(cd -P -- "${TMP}" >/dev/null && pwd -P)
+  export TMP
+  TESTBINDIR=$(cd -P -- "$(pwd)" >/dev/null && pwd -P)
+  HADOOP_LIBEXEC_DIR=${TESTBINDIR}/../../main/bin
+  HADOOP_LIBEXEC_DIR=$(cd -P -- "${HADOOP_LIBEXEC_DIR}" >/dev/null && pwd -P)
+
+  # shellcheck disable=SC2034
+  HADOOP_SHELL_SCRIPT_DEBUG=true
+  unset HADOOP_CONF_DIR
+  unset HADOOP_HOME
+  unset HADOOP_PREFIX
+
+  echo "bindir: ${TESTBINDIR}" 2>&1
+
+  mkdir -p "${TMP}"
+
+  # shellcheck disable=SC2034
+  QATESTMODE=true
+
+  . ../../main/bin/hadoop-functions.sh
+  pushd "${TMP}" >/dev/null
+}
+
+teardown() {
+  popd >/dev/null
+  rm -rf "${TMP}"
+}
+
+
+strstr() {
+  if [ "${1#*$2}" != "${1}" ]; then
+    echo true
+  else
+    echo false
+  fi
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_classpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_classpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_classpath.bats
new file mode 100644
index 0000000..8bc50d0
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_classpath.bats
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_classpath (simple not exist)" {
+  run hadoop_add_classpath testvar
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_add_classpath (simple wildcard not exist)" {
+  run hadoop_add_classpath testvar/*
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_add_classpath (simple exist)" {
+  run hadoop_add_classpath "${TMP}"
+  [ "${status}" -eq 0 ]
+}
+
+@test "hadoop_add_classpath (simple wildcard exist)" {
+  run hadoop_add_classpath "${TMP}/*"
+  [ "${status}" -eq 0 ]
+}
+
+@test "hadoop_add_classpath (simple dupecheck)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "${TMP}/*"
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/*" ]
+}
+
+@test "hadoop_add_classpath (default order)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp"
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
+}
+
+@test "hadoop_add_classpath (after order)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp" after
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
+}
+
+@test "hadoop_add_classpath (before order)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp" before
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "/tmp:${TMP}/*" ]
+}
+
+@test "hadoop_add_classpath (simple dupecheck 2)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp"
+  hadoop_add_classpath "${TMP}/*"
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
+}
+
+@test "hadoop_add_classpath (dupecheck 3)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp" before
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp" after
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "/tmp:${TMP}/*" ]
+}
+
+@test "hadoop_add_classpath (complex ordering)" {
+  local j
+  local style="after"
+
+  # 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
+
+  for j in {1..5}; do
+    mkdir ${TMP}/${j}
+    hadoop_add_classpath "${TMP}/${j}" "${style}"
+    if [ "${style}" = "after" ]; then
+      style=before
+    else
+      style=after
+    fi
+  done
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_colonpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_colonpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_colonpath.bats
new file mode 100644
index 0000000..e6c59ad
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_colonpath.bats
@@ -0,0 +1,96 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_colonpath (simple not exist)" {
+  run hadoop_add_colonpath testvar
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_add_colonpath (simple exist)" {
+  run hadoop_add_colonpath testvar "${TMP}"
+  [ "${status}" -eq 0 ]
+}
+
+@test "hadoop_add_colonpath (simple dupecheck)" {
+  set +e
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "${TMP}"
+  set -e
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}" ]
+}
+
+@test "hadoop_add_colonpath (default order)" {
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp"
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_colonpath (after order)" {
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp" after
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_colonpath (before order)" {
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp" before
+  echo ">${testvar}<"
+  [ "${testvar}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_colonpath (simple dupecheck 2)" {
+  set +e
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp"
+  hadoop_add_colonpath testvar "${TMP}"
+  set -e
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_colonpath (dupecheck 3)" {
+  set +e
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp" before
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp" after
+  set -e
+  echo ">${testvar}<"
+  [ "${testvar}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_colonpath (complex ordering)" {
+  local j
+  local style="after"
+
+  # 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
+
+  for j in {1..5}; do
+    mkdir ${TMP}/${j}
+    hadoop_add_colonpath testvar "${TMP}/${j}" "${style}"
+    if [ "${style}" = "after" ]; then
+      style=before
+    else
+      style=after
+    fi
+  done
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_common_to_classpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_common_to_classpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_common_to_classpath.bats
new file mode 100644
index 0000000..14e75a6
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_common_to_classpath.bats
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+freetheclasses () {
+  local j
+
+  for j in HADOOP_CLASSPATH  \
+        HADOOP_ENABLE_BUILD_PATHS \
+        CLASSPATH HADOOP_COMMON_DIR \
+        HADOOP_COMMON_HOME \
+        HADOOP_COMMON_LIB_JARS_DIR \
+        HADOOP_ENABLE_BUILD_PATHS ; do
+      unset ${!j}
+  done
+}
+
+createdirs () {
+  local j
+
+  for j in hadoop-common/target/classes \
+           commondir/webapps commonlibjars ; do
+    mkdir -p "${TMP}/${j}"
+    touch "${TMP}/${j}/fake.jar"
+  done
+  HADOOP_COMMON_HOME=${TMP}
+  HADOOP_COMMON_DIR=commondir
+  HADOOP_COMMON_LIB_JARS_DIR=commonlibjars
+}
+
+@test "hadoop_add_common_to_classpath (negative)" {
+   freetheclasses
+   createdirs
+   unset HADOOP_COMMON_HOME
+   run hadoop_add_common_to_classpath
+   [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_add_common_to_classpath (positive)" {
+   freetheclasses
+   createdirs
+   set +e
+   hadoop_add_common_to_classpath
+   set -e
+   echo ">${CLASSPATH}<"
+   [ "${CLASSPATH}" = "${TMP}/commonlibjars/*:${TMP}/commondir/*" ]
+}
+
+@test "hadoop_add_common_to_classpath (build paths)" {
+   freetheclasses
+   createdirs
+   HADOOP_ENABLE_BUILD_PATHS=true
+   set +e
+   hadoop_add_common_to_classpath
+   set -e
+   echo ">${CLASSPATH}<"
+   [ "${CLASSPATH}" = "${TMP}/hadoop-common/target/classes:${TMP}/commonlibjars/*:${TMP}/commondir/*" ]
+ }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_javalibpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_javalibpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_javalibpath.bats
new file mode 100644
index 0000000..b17b546
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_javalibpath.bats
@@ -0,0 +1,98 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_javalibpath (simple not exist)" {
+  run hadoop_add_javalibpath "${TMP}/foo"
+  [ "${status}" -eq 1 ]
+}
+
+
+@test "hadoop_add_javalibpath (simple exist)" {
+  run hadoop_add_javalibpath "${TMP}"
+  [ "${status}" -eq 0 ]
+}
+
+
+@test "hadoop_add_javalibpath (simple dupecheck)" {
+  set +e
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "${TMP}"
+  set -e
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}" ]
+}
+
+@test "hadoop_add_javalibpath (default order)" {
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp"
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_javalibpath (after order)" {
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp" after
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_javalibpath (before order)" {
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp" before
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_javalibpath (simple dupecheck 2)" {
+  set +e
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp"
+  hadoop_add_javalibpath "${TMP}"
+  set -e
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_javalibpath (dupecheck 3)" {
+  set +e
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp" before
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp" after
+  set -e
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_javalibpath (complex ordering)" {
+  local j
+  local style="after"
+
+  # 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
+
+  for j in {1..5}; do
+    mkdir ${TMP}/${j}
+    hadoop_add_javalibpath "${TMP}/${j}" "${style}"
+    if [ "${style}" = "after" ]; then
+      style=before
+    else
+      style=after
+    fi
+  done
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_ldlibpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_ldlibpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_ldlibpath.bats
new file mode 100644
index 0000000..4f909e2
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_ldlibpath.bats
@@ -0,0 +1,97 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_ldlibpath (simple not exist)" {
+  run hadoop_add_ldlibpath ${TMP}/foo
+  [ "${status}" -eq 1 ]
+}
+
+
+@test "hadoop_add_ldlibpath (simple exist)" {
+  run hadoop_add_ldlibpath "${TMP}"
+  [ "${status}" -eq 0 ]
+}
+
+@test "hadoop_add_ldlibpath (simple dupecheck)" {
+  set +e
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "${TMP}"
+  set -e
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}" ]
+}
+
+@test "hadoop_add_ldlibpath (default order)" {
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp"
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_ldlibpath (after order)" {
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp" after
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_ldlibpath (before order)" {
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp" before
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_ldlibpath (simple dupecheck 2)" {
+  set +e
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp"
+  hadoop_add_ldlibpath "${TMP}"
+  set -e
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_ldlibpath (dupecheck 3)" {
+  set +e
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp" before
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp" after
+  set -e
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_ldlibpath (complex ordering)" {
+  local j
+  local style="after"
+
+  # 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
+
+  for j in {1..5}; do
+    mkdir ${TMP}/${j}
+    hadoop_add_ldlibpath "${TMP}/${j}" "${style}"
+    if [ "${style}" = "after" ]; then
+      style=before
+    else
+      style=after
+    fi
+  done
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_param.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_param.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_param.bats
new file mode 100644
index 0000000..5d65db0
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_param.bats
@@ -0,0 +1,49 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_param (positive 1)" {
+  hadoop_add_param testvar foo foo
+  echo ">${testvar}<"
+  [ "${testvar}" = "foo" ]
+}
+
+@test "hadoop_add_param (negative)" {
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar foo foo
+  echo ">${testvar}<"
+  [ "${testvar}" = "foo" ]
+}
+
+@test "hadoop_add_param (positive 2)" {
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar bar bar
+  echo ">${testvar}<"
+  [ "${testvar}" = "foo bar" ]
+}
+
+@test "hadoop_add_param (positive 3)" {
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar bar bar
+  hadoop_add_param testvar bar bar
+  hadoop_add_param testvar baz baz
+  hadoop_add_param testvar baz baz
+
+  echo ">${testvar}<"
+  [ "${testvar}" = "foo bar baz" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats
new file mode 100644
index 0000000..4d6667f
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats
@@ -0,0 +1,98 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# Reset every classpath-related environment variable so each test
+# starts from a clean slate.
+freetheclasses () {
+  local j
+
+  for j in HADOOP_CLASSPATH  \
+      HADOOP_USE_CLIENT_CLASSLOADER \
+      HADOOP_USER_CLASSPATH_FIRST \
+      CLASSPATH; do
+      # unset the variable NAMED by j; the previous "unset ${!j}"
+      # indirected through j and tried to unset the variable's VALUE,
+      # which errors out ("not a valid identifier") whenever e.g.
+      # CLASSPATH is inherited from the environment with slashes/colons
+      unset "${j}"
+  done
+}
+
+# Create scratch subdirectories under ${TMP} used as classpath entries.
+createdirs () {
+  local j
+
+  for j in new old foo bar baz; do
+    mkdir -p "${TMP}/${j}"
+  done
+}
+
+@test "hadoop_add_to_classpath_userpath (nothing)" {
+   freetheclasses
+   hadoop_add_to_classpath_userpath
+   [ -z "${CLASSPATH}" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (none)" {
+   freetheclasses
+   CLASSPATH=test
+   hadoop_add_to_classpath_userpath
+   [ "${CLASSPATH}" = "test" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (only)" {
+   freetheclasses
+   createdirs
+   HADOOP_CLASSPATH="${TMP}/new"
+   hadoop_add_to_classpath_userpath
+   [ "${CLASSPATH}" = "${TMP}/new" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (classloader)" {
+   freetheclasses
+   createdirs
+   HADOOP_CLASSPATH="${TMP}/new"
+   HADOOP_USE_CLIENT_CLASSLOADER="true"
+   hadoop_add_to_classpath_userpath
+   [ -z "${CLASSPATH}" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (1+1 dupe)" {
+   freetheclasses
+   createdirs
+   CLASSPATH=${TMP}/foo
+   HADOOP_CLASSPATH=${TMP}/foo
+   HADOOP_USER_CLASSPATH_FIRST=""
+   hadoop_add_to_classpath_userpath
+   echo ">${CLASSPATH}<"
+   [ ${CLASSPATH} = "${TMP}/foo" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (3+2 after)" {
+   freetheclasses
+   createdirs
+   CLASSPATH=${TMP}/foo:${TMP}/bar:${TMP}/baz
+   HADOOP_CLASSPATH=${TMP}/new:${TMP}/old
+   HADOOP_USER_CLASSPATH_FIRST=""
+   hadoop_add_to_classpath_userpath
+   echo ">${CLASSPATH}<"
+   [ ${CLASSPATH} = "${TMP}/foo:${TMP}/bar:${TMP}/baz:${TMP}/new:${TMP}/old" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (3+2 before)" {
+   freetheclasses
+   createdirs
+   CLASSPATH=${TMP}/foo:${TMP}/bar:${TMP}/baz
+   HADOOP_CLASSPATH=${TMP}/new:${TMP}/old
+   HADOOP_USER_CLASSPATH_FIRST="true"
+   hadoop_add_to_classpath_userpath
+   echo ">${CLASSPATH}<"
+   [ ${CLASSPATH} = "${TMP}/new:${TMP}/old:${TMP}/foo:${TMP}/bar:${TMP}/baz" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
new file mode 100644
index 0000000..74e2497
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
@@ -0,0 +1,94 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+
+
+# Clear every variable hadoop_basic_init is expected to default, and
+# point HADOOP_PREFIX at the test scratch directory.  testvars/dirvars
+# are intentionally left global so the @test bodies can iterate them.
+basicinitsetup () {
+  local j
+
+  testvars="HADOOP_IDENT_STRING \
+        HADOOP_LOG_DIR \
+        HADOOP_LOGFILE \
+        HADOOP_LOGLEVEL \
+        HADOOP_NICENESS \
+        HADOOP_STOP_TIMEOUT \
+        HADOOP_PID_DIR \
+        HADOOP_ROOT_LOGGER \
+        HADOOP_DAEMON_ROOT_LOGGER \
+        HADOOP_SECURITY_LOGGER \
+        HADOOP_SSH_OPTS \
+        HADOOP_SECURE_LOG_DIR \
+        HADOOP_SECURE_PID_DIR \
+        HADOOP_SSH_PARALLEL"
+
+  dirvars="HADOOP_COMMON_HOME \
+        HADOOP_MAPRED_HOME \
+        HADOOP_HDFS_HOME \
+        HADOOP_YARN_HOME"
+
+  for j in ${testvars}; do
+    # unset the variable NAMED by j; the previous "unset ${!j}"
+    # indirected through j and tried to unset the variable's VALUE
+    unset "${j}"
+  done
+
+  HADOOP_PREFIX=${TMP}
+}
+
+# Assert that every variable listed in testvars received a value
+# (i.e. hadoop_basic_init filled in all of its defaults).
+check_var_values () {
+  local j
+
+  for j in ${testvars}; do
+    echo "Verifying ${j} has a value"
+    [ -n "${!j}" ]
+  done
+}
+
+@test "hadoop_basic_init (bad dir errors)" {
+  local j
+  local i
+  # we need to do these in the same order for
+  # the unit test, so that the tests are easier
+  # to write/test
+  basicinitsetup
+  for j in ${dirvars}; do
+    echo "testing ${j}"
+    i=${TMP}/${j}
+    mkdir -p "${i}"
+    #shellcheck disable=SC2086
+    eval ${j}=${i}
+    hadoop_basic_init
+    echo "Verifying $j has >${i}< >${!j}<"
+    [ ${!j} = ${i} ]
+  done
+}
+
+
+@test "hadoop_basic_init (no non-dir overrides)" {
+  basicinitsetup
+  hadoop_basic_init
+  check_var_values
+}
+
+@test "hadoop_basic_init (test non-dir overrides)" {
+  local j
+  for j in ${testvars}; do
+    basicinitsetup
+    echo testing ${j}
+    eval ${j}=foo
+    hadoop_basic_init
+    check_var_values
+    echo "Verifying $j has foo >${!j}<"
+    [ ${j} = foo ]
+  done
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
new file mode 100644
index 0000000..0fd5d21
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_deprecate_envvar (no libexec)" {
+  unset HADOOP_LIBEXEC_DIR
+  run hadoop_bootstrap
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_deprecate_envvar (libexec)" {
+  unset   HADOOP_PREFIX
+  unset   HADOOP_COMMON_DIR
+  unset   HADOOP_COMMON_LIB_JARS_DIR
+  unset   HDFS_DIR
+  unset   HDFS_LIB_JARS_DIR
+  unset   YARN_DIR
+  unset   YARN_LIB_JARS_DIR
+  unset   MAPRED_DIR
+  unset   MAPRED_LIB_JARS_DIR
+  unset   TOOL_PATH
+  unset   HADOOP_OS_TYPE
+
+  hadoop_bootstrap
+
+  # all of these should be set
+  [ -n ${HADOOP_PREFIX} ]
+  [ -n ${HADOOP_COMMON_DIR} ]
+  [ -n ${HADOOP_COMMON_LIB_JARS_DIR} ]
+  [ -n ${HDFS_DIR} ]
+  [ -n ${HDFS_LIB_JARS_DIR} ]
+  [ -n ${YARN_DIR} ]
+  [ -n ${YARN_LIB_JARS_DIR} ]
+  [ -n ${MAPRED_DIR} ]
+  [ -n ${MAPRED_LIB_JARS_DIR} ]
+  [ -n ${TOOL_PATH} ]
+  [ -n ${HADOOP_OS_TYPE} ]
+} 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
new file mode 100644
index 0000000..3e42da9
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
@@ -0,0 +1,92 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# Build both historical conf-dir layouts (prefix/conf and
+# prefix/etc/hadoop) under ${TMP}, each with a hadoop-env.sh that sets
+# "unittest" to the layout name so tests can tell which file was read.
+create_fake_dirs () {
+  HADOOP_PREFIX=${TMP}
+  for j in conf etc/hadoop; do
+    mkdir -p "${HADOOP_PREFIX}/${j}"
+    echo "unittest=${j}" > "${HADOOP_PREFIX}/${j}/hadoop-env.sh"
+  done
+}
+
+@test "hadoop_find_confdir (default)" {
+  create_fake_dirs
+  hadoop_find_confdir
+  [ -n "${HADOOP_CONF_DIR}" ]
+}
+
+# when both layouts exist, the legacy prefix/conf wins
+@test "hadoop_find_confdir (bw compat: conf)" {
+  create_fake_dirs
+  hadoop_find_confdir
+  echo ">${HADOOP_CONF_DIR}< >${HADOOP_PREFIX}/conf<"
+  [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/conf ]
+}
+
+@test "hadoop_find_confdir (etc/hadoop)" {
+  create_fake_dirs
+  rm -rf "${HADOOP_PREFIX}/conf"
+  hadoop_find_confdir
+  [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/etc/hadoop ]
+}
+
+# hadoop_verify_confdir warns (produces output) when log4j.properties
+# is missing from the configured directory
+@test "hadoop_verify_confdir (negative) " {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  run hadoop_verify_confdir
+  [ -n "${output}" ]
+}
+
+@test "hadoop_verify_confdir (positive) " {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  touch "${HADOOP_CONF_DIR}/log4j.properties"
+  run hadoop_verify_confdir
+  [ -z "${output}" ]
+}
+
+@test "hadoop_exec_hadoopenv (positive) " {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  hadoop_exec_hadoopenv
+  [ -n "${HADOOP_ENV_PROCESSED}" ]
+  [ "${unittest}" = conf ]
+}
+
+# hadoop-env.sh must not be sourced twice: HADOOP_ENV_PROCESSED guards it
+@test "hadoop_exec_hadoopenv (negative) " {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_ENV_PROCESSED=true
+  hadoop_exec_hadoopenv
+  [ -z "${unittest}" ]
+}
+
+@test "hadoop_exec_userfuncs" {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  echo "unittest=userfunc" > "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
+  hadoop_exec_userfuncs
+  [ "${unittest}" = "userfunc" ]
+}
+
+@test "hadoop_exec_hadooprc" {
+  HOME=${TMP}
+  echo "unittest=hadooprc" > "${TMP}/.hadooprc"
+  hadoop_exec_hadooprc
+  [ ${unittest} = "hadooprc" ]
+}
+
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_deprecate_envvar.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_deprecate_envvar.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_deprecate_envvar.bats
new file mode 100644
index 0000000..ae02c1f
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_deprecate_envvar.bats
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_deprecate_envvar (replace)" {
+  OLD=value1
+  NEW=value2
+  hadoop_deprecate_envvar OLD NEW
+  [ "${NEW}" = "${OLD}" ]
+}
+
+
+@test "hadoop_deprecate_envvar (no replace)" {
+  OLD=
+  NEW=value2
+  hadoop_deprecate_envvar OLD NEW
+  [ "${NEW}" = value2 ]
+}
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
new file mode 100644
index 0000000..668c115
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
@@ -0,0 +1,206 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize (shellprofiles)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { testvar=shell; }
+  hadoop_finalize_classpath () { true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "shell" ];
+}
+
+@test "hadoop_finalize (classpath)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  testvar=class; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "class" ];
+}
+
+@test "hadoop_finalize (libpaths)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { testvar=libpaths; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "libpaths" ];
+}
+
+
+@test "hadoop_finalize (heap)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { testvar=heap; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "heap" ];
+}
+
+@test "hadoop_finalize (opts)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { testvar=opts; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "opts" ];
+}
+
+@test "hadoop_finalize (cygwin prefix)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_PREFIX ]; then
+      testvar=prefix;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "prefix" ];
+}
+
+@test "hadoop_finalize (cygwin conf dir)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_CONF_DIR ]; then
+      testvar=confdir;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "confdir" ];
+}
+
+@test "hadoop_finalize (cygwin common)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_COMMON_HOME ]; then
+      testvar=common;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "common" ];
+}
+
+@test "hadoop_finalize (cygwin hdfs)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_HDFS_HOME ]; then
+      testvar=hdfs;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "hdfs" ];
+}
+
+@test "hadoop_finalize (cygwin yarn)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_YARN_HOME ]; then
+      testvar=yarn;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "yarn" ];
+}
+
+@test "hadoop_finalize (cygwin mapred)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_MAPRED_HOME ]; then
+      testvar=mapred;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "mapred" ];
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_catalina_opts.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_catalina_opts.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_catalina_opts.bats
new file mode 100644
index 0000000..d91223e
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_catalina_opts.bats
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize_catalina_opts (raw)" {
+  local j
+
+  HADOOP_IS_CYGWIN=false
+  HADOOP_CATALINA_PREFIX=test
+  CATALINA_OPTS=""
+  hadoop_finalize_catalina_opts
+  for j in test.home.dir \
+        test.config.dir \
+        test.log.dir \
+        test.admin.port \
+        test.http.port \
+        test.max.threads \
+        test.ssl.keystore.file; do
+    [ "${CATALINA_OPTS#*${j}}" != "${CATALINA_OPTS}" ]
+  done
+}
+
+@test "hadoop_finalize_catalina_opts (cygwin)" {
+  local j
+
+  skip "catalina commands not supported under cygwin yet"
+
+  HADOOP_IS_CYGWIN=true
+  HADOOP_CATALINA_PREFIX=test
+  CATALINA_OPTS=""
+
+  catalina_translate_cygwin_path () {
+    eval ${1}="foobarbaz"
+  }
+
+  hadoop_finalize_catalina_opts
+  for j in test.home.dir \
+        test.config.dir \
+        test.log.dir \
+        test.ssl.keystore.file; do
+    [ "${CATALINA_OPTS#*${j}=foobarbaz}" != "${CATALINA_OPTS}" ]
+  done
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_classpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_classpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_classpath.bats
new file mode 100644
index 0000000..ac0d4c1
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_classpath.bats
@@ -0,0 +1,64 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize_classpath (only conf dir)" {
+  CLASSPATH=""
+  HADOOP_CONF_DIR="${TMP}"
+
+  hadoop_translate_cygwin_path () { true; }
+  hadoop_add_to_classpath_userpath () { true; }
+
+  hadoop_finalize_classpath
+
+  [ "${CLASSPATH}" = "${TMP}" ]
+
+}
+
+@test "hadoop_finalize_classpath (before conf dir)" {
+  CLASSPATH="1"
+  HADOOP_CONF_DIR="${TMP}"
+
+  hadoop_translate_cygwin_path () { true; }
+  hadoop_add_to_classpath_userpath () { true; }
+
+  hadoop_finalize_classpath
+
+  [ "${CLASSPATH}" = "${TMP}:1" ]
+}
+
+@test "hadoop_finalize_classpath (adds user)" {
+  CLASSPATH=""
+  HADOOP_CONF_DIR="${TMP}"
+
+  hadoop_translate_cygwin_path () { true; }
+  hadoop_add_to_classpath_userpath () { testvar=true; }
+
+  hadoop_finalize_classpath
+
+  [ "${testvar}" = "true" ]
+}
+
+@test "hadoop_finalize_classpath (calls cygwin)" {
+  CLASSPATH=""
+  HADOOP_CONF_DIR="${TMP}"
+  HADOOP_IS_CYGWIN=true
+
+  hadoop_translate_cygwin_path () { [ $1 = CLASSPATH ]; }
+  hadoop_add_to_classpath_userpath () { true; }
+
+  hadoop_finalize_classpath
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_heap.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_heap.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_heap.bats
new file mode 100644
index 0000000..ef49d5b
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_heap.bats
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# Clear every heap-related knob so each test controls exactly one input.
+resetops () {
+  unset HADOOP_HEAPSIZE_MAX
+  unset HADOOP_HEAPSIZE
+  unset HADOOP_HEAPSIZE_MIN
+  unset HADOOP_OPTS
+}
+
+@test "hadoop_finalize_hadoop_heap (negative)" {
+  resetops
+  hadoop_finalize_hadoop_heap
+  [ -z "${HADOOP_OPTS}" ]
+}
+
+# a bare number gets an "m" (megabytes) suffix appended
+@test "hadoop_finalize_hadoop_heap (no unit max)" {
+  resetops
+  HADOOP_HEAPSIZE_MAX=1000
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx1000m" ]
+}
+
+# legacy HADOOP_HEAPSIZE behaves like HADOOP_HEAPSIZE_MAX
+@test "hadoop_finalize_hadoop_heap (no unit old)" {
+  resetops
+  HADOOP_HEAPSIZE=1000
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx1000m" ]
+}
+
+# a value that already carries a unit is passed through untouched
+@test "hadoop_finalize_hadoop_heap (unit max)" {
+  resetops
+  HADOOP_HEAPSIZE_MAX=10g
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx10g" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (unit old)" {
+  resetops
+  HADOOP_HEAPSIZE=10g
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx10g" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (no unit min)" {
+  resetops
+  HADOOP_HEAPSIZE_MIN=1000
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xms1000m" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (unit min)" {
+  resetops
+  HADOOP_HEAPSIZE_MIN=10g
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xms10g" ]
+}
+
+# an -Xmx already present in HADOOP_OPTS wins over the heapsize vars,
+# and repeated finalization must not append a second -Xmx
+@test "hadoop_finalize_hadoop_heap (dedupe)" {
+  resetops
+  HADOOP_HEAPSIZE_MAX=1000
+  HADOOP_OPTS="-Xmx5g"
+  hadoop_finalize_hadoop_heap
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx5g" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_opts.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_opts.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_opts.bats
new file mode 100644
index 0000000..3acb1a5
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_opts.bats
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize_hadoop_opts (raw)" {
+  local j
+
+  HADOOP_IS_CYGWIN=false
+  HADOOP_OPTS=""
+  hadoop_finalize_hadoop_opts
+  for j in hadoop.log.dir \
+        hadoop.log.file \
+        hadoop.home.dir \
+        hadoop.root.logger \
+        hadoop.policy.file \
+        hadoop.security.logger \
+        hadoop.id.str; do
+
+    [ "${HADOOP_OPTS#*${j}}" != "${HADOOP_OPTS}" ]
+  done
+}
+
+@test "hadoop_finalize_hadoop_opts (cygwin)" {
+  local j
+
+  HADOOP_IS_CYGWIN=true
+  HADOOP_OPTS=""
+
+  hadoop_translate_cygwin_path () {
+    eval ${1}="foobarbaz"
+  }
+
+  hadoop_finalize_hadoop_opts
+  for j in hadoop.log.dir \
+        hadoop.home.dir; do
+    echo "${j} from >${HADOOP_OPTS}<"
+    [ "${HADOOP_OPTS#*${j}=foobarbaz}" != "${HADOOP_OPTS}" ]
+  done
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_libpaths.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_libpaths.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_libpaths.bats
new file mode 100644
index 0000000..48ba773
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_libpaths.bats
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize_libpaths (negative)" {
+  unset JAVA_LIBRARY_PATH
+  unset HADOOP_OPTS
+  hadoop_finalize_libpaths
+  [ -z "${HADOOP_OPTS}" ]
+}
+
+@test "hadoop_finalize_libpaths (positive)" {
+  JAVA_LIBRARY_PATH=test
+  unset HADOOP_OPTS
+  hadoop_finalize_libpaths
+  [ "${HADOOP_OPTS}" = "-Djava.library.path=test" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_java_setup.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_java_setup.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_java_setup.bats
new file mode 100644
index 0000000..5a6ee10
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_java_setup.bats
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_java_setup (negative not set)" {
+  unset JAVA_HOME
+  run hadoop_java_setup
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_java_setup (negative not a dir)" {
+  touch ${TMP}/foo
+  JAVA_HOME="${TMP}/foo"
+  run hadoop_java_setup
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_java_setup (negative not exec)" {
+  mkdir -p "${TMP}/bin"
+  touch "${TMP}/bin/java"
+  JAVA_HOME="${TMP}"
+  chmod a-x "${TMP}/bin/java"
+  run hadoop_java_setup
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_java_setup (positive)" {
+  mkdir -p "${TMP}/bin"
+  touch "${TMP}/bin/java"
+  JAVA_HOME="${TMP}"
+  chmod a+x "${TMP}/bin/java"
+  run hadoop_java_setup
+  [ "${status}" -eq 0 ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_os_tricks.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_os_tricks.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_os_tricks.bats
new file mode 100644
index 0000000..ae04f72
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_os_tricks.bats
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_os_tricks (cygwin sets cygwin)" {
+  HADOOP_OS_TYPE=CYGWIN-IS-GNU-USER-LAND
+  hadoop_os_tricks
+  [ "${HADOOP_IS_CYGWIN}" = "true" ]
+}
+
+@test "hadoop_os_tricks (linux sets arena max)" {
+  HADOOP_OS_TYPE=Linux
+  hadoop_os_tricks
+  [ -n "${MALLOC_ARENA_MAX}" ]
+}
+
+@test "hadoop_os_tricks (osx sets java_home)" {
+  HADOOP_OS_TYPE=Darwin
+  hadoop_os_tricks
+  [ -n "${JAVA_HOME}" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_rotate_log.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_rotate_log.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_rotate_log.bats
new file mode 100644
index 0000000..f73fea6
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_rotate_log.bats
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_rotate_log (defaults)" {
+  touch "${TMP}/log"
+  hadoop_rotate_log "${TMP}/log"
+  [ -f "${TMP}/log.1" ]
+  [ ! -f "${TMP}/log" ]
+}
+
+@test "hadoop_rotate_log (one archive log)" {
+  touch "${TMP}/log"
+  hadoop_rotate_log "${TMP}/log" 1
+  [ -f "${TMP}/log.1" ]
+  [ ! -f "${TMP}/log" ]
+}
+
+@test "hadoop_rotate_log (default five archive logs)" {
+  local i
+  for i in {1..5}; do
+    echo "Testing ${i}"
+    touch "${TMP}/log"
+    hadoop_rotate_log "${TMP}/log"
+    ls "${TMP}"
+    [ -f "${TMP}/log.${i}" ]
+  done
+}
+
+@test "hadoop_rotate_log (ten archive logs)" {
+  local i
+  for i in {1..10}; do
+    echo "Testing ${i}"
+    touch "${TMP}/log"
+    hadoop_rotate_log "${TMP}/log" 10
+    ls "${TMP}"
+    [ -f "${TMP}/log.${i}" ]
+  done
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_shellprofile.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_shellprofile.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_shellprofile.bats
new file mode 100644
index 0000000..d6e0a25
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_shellprofile.bats
@@ -0,0 +1,91 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+shellprofilesetup () {
+  HADOOP_LIBEXEC_DIR="${TMP}/libexec"
+  HADOOP_CONF_DIR="${TMP}/conf"
+  mkdir -p "${HADOOP_LIBEXEC_DIR}/shellprofile.d" "${HADOOP_CONF_DIR}/shellprofile.d"
+}
+
+_test_hadoop_init () {
+  unittest=init
+}
+
+_test_hadoop_classpath () {
+  unittest=classpath
+}
+
+_test_hadoop_nativelib () {
+  unittest=nativelib
+}
+
+_test_hadoop_finalize () {
+  unittest=finalize
+}
+
+@test "hadoop_import_shellprofiles (negative)" {
+  shellprofilesetup
+  unset HADOOP_LIBEXEC_DIR
+  run hadoop_import_shellprofiles
+  [ -n "${output}" ]
+}
+
+@test "hadoop_import_shellprofiles (libexec sh import)" {
+  shellprofilesetup
+  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.sh"
+  hadoop_import_shellprofiles
+  [ "${unittest}" = libexec ]
+}
+
+@test "hadoop_import_shellprofiles (libexec conf sh import+override)" {
+  shellprofilesetup
+  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.sh"
+  echo "unittest=conf" > "${HADOOP_CONF_DIR}/shellprofile.d/test.sh"
+  hadoop_import_shellprofiles
+  [ "${unittest}" = conf ]
+}
+
+@test "hadoop_import_shellprofiles (libexec no cmd import)" {
+  shellprofilesetup
+  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.cmd"
+  hadoop_import_shellprofiles
+  [ -z "${unittest}" ]
+}
+
+@test "hadoop_add_profile+hadoop_shellprofiles_init" {
+  hadoop_add_profile test
+  hadoop_shellprofiles_init
+  [ "${unittest}" = init ]
+}
+
+@test "hadoop_add_profile+hadoop_shellprofiles_classpath" {
+  hadoop_add_profile test
+  hadoop_shellprofiles_classpath
+  [ "${unittest}" = classpath ]
+}
+
+@test "hadoop_add_profile+hadoop_shellprofiles_nativelib" {
+  hadoop_add_profile test
+  hadoop_shellprofiles_nativelib
+  [ "${unittest}" = nativelib ]
+}
+
+@test "hadoop_add_profile+hadoop_shellprofiles_finalize" {
+  hadoop_add_profile test
+  hadoop_shellprofiles_finalize
+  [ "${unittest}" = finalize ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_slaves.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_slaves.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_slaves.bats
new file mode 100644
index 0000000..cc33f0e
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_slaves.bats
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_populate_slaves_file (specific file)" {
+  touch "${TMP}/file"
+  hadoop_populate_slaves_file "${TMP}/file"
+  [ "${HADOOP_SLAVES}" = "${TMP}/file" ]
+}
+
+@test "hadoop_populate_slaves_file (specific conf dir file)" {
+  HADOOP_CONF_DIR=${TMP}/1
+  mkdir -p "${HADOOP_CONF_DIR}"
+  touch "${HADOOP_CONF_DIR}/file"
+  hadoop_populate_slaves_file "file"
+  echo "${HADOOP_SLAVES}"
+  [ "${HADOOP_SLAVES}" = "${HADOOP_CONF_DIR}/file" ]
+}
+
+@test "hadoop_populate_slaves_file (no file)" {
+  HADOOP_CONF_DIR=${TMP}
+  run hadoop_populate_slaves_file "foo"
+  [ "${status}" -eq 1 ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats
new file mode 100644
index 0000000..53e86ce
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_actual_ssh" {
+  skip "Not implemented"
+  hadoop_actual_ssh
+}
+
+@test "hadoop_connect_to_hosts" {
+  skip "Not implemented"
+  hadoop_connect_to_hosts
+}
+
+@test "hadoop_connect_to_hosts_without_pdsh" {
+  skip "Not implemented"
+  hadoop_connect_to_hosts_without_pdsh
+}
+
+@test "hadoop_common_slave_mode_execute (--slaves 1)" {
+  run  hadoop_common_slave_mode_execute --slaves command
+  [ ${output} = command ]
+}
+
+@test "hadoop_common_slave_mode_execute (--slaves 2)" {
+  run  hadoop_common_slave_mode_execute --slaves command1 command2
+  [ ${output} = "command1 command2" ]
+}
+
+@test "hadoop_common_slave_mode_execute (--hosts)" {
+  run  hadoop_common_slave_mode_execute --hosts filename command
+  [ ${output} = command ]
+}
+
+@test "hadoop_common_slave_mode_execute (--hostnames 2)" {
+  run  hadoop_common_slave_mode_execute --hostnames "host1,host2" command1 command2
+  [ ${output} = "command1 command2" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_translate_cygwin_path.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_translate_cygwin_path.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_translate_cygwin_path.bats
new file mode 100644
index 0000000..e5f6aec
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_translate_cygwin_path.bats
@@ -0,0 +1,48 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_translate_cygwin_path (negative)" {
+  HADOOP_IS_CYGWIN=false
+  testvar="/this/path/is/cool"
+  hadoop_translate_cygwin_path testvar
+  [ "${testvar}" = "/this/path/is/cool" ]
+}
+
+@test "hadoop_translate_cygwin_path (positive)" {
+  HADOOP_IS_CYGWIN=true
+  testvar="/this/path/is/cool"
+
+  cygpath () {
+    echo "test"
+  }
+
+  hadoop_translate_cygwin_path testvar
+  [ "${testvar}" = "test" ]
+}
+
+
+@test "hadoop_translate_cygwin_path (path positive)" {
+  HADOOP_IS_CYGWIN=true
+  testvar="/this/path/is/cool"
+
+  cygpath () {
+    echo "test"
+  }
+
+  hadoop_translate_cygwin_path testvar true
+  [ "${testvar}" = "test" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_validate_classname.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_validate_classname.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_validate_classname.bats
new file mode 100644
index 0000000..1ba5b32
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_validate_classname.bats
@@ -0,0 +1,26 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_validate_classname (negative)" {
+  run hadoop_validate_classname fakeclass
+  [ ${status} -eq 1 ]
+}
+
+@test "hadoop_validate_classname (positive)" {
+  run hadoop_validate_classname org.apache.hadoop.io.Text
+  [ ${status} -eq 0 ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/run-bats.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/run-bats.sh b/hadoop-common-project/hadoop-common/src/test/scripts/run-bats.sh
new file mode 100755
index 0000000..566f47a
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/run-bats.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+targetdir=../../../target
+mkdir -p ${targetdir}/surefire-reports ${targetdir}/tap
+
+batsexe=$(which bats) 2>/dev/null
+
+if [[ -z ${batsexe} ]]; then
+  echo "not ok - no bats executable found" >  "${targetdir}/tap/shelltest.tap"
+  echo ""
+  echo ""
+  echo "ERROR: bats not installed. Skipping bash tests."
+  echo "ERROR: Please install bats as soon as possible."
+  echo ""
+  echo ""
+  exit 0
+fi
+
+for j in *.bats; do
+  echo Running bats -t "${j}"
+  bats -t "${j}" 2>&1 | tee "${targetdir}/tap/${j}.tap"
+  result=${PIPESTATUS[0]}
+  ((exitcode=exitcode+result))
+done
+
+if [[ ${exitcode} -gt 0 ]]; then
+  exit 1
+fi
+exit 0


Mime
View raw message