hadoop-common-commits mailing list archives

From omal...@apache.org
Subject svn commit: r1128385 [2/2] - in /hadoop/common/trunk: ./ bin/ conf/ ivy/ src/docs/cn/src/documentation/content/xdocs/ src/docs/src/documentation/content/xdocs/ src/native/ src/native/lib/ src/native/src/org/apache/hadoop/io/compress/zlib/ src/packages/...
Date Fri, 27 May 2011 16:35:04 GMT
Added: hadoop/common/trunk/src/packages/deb/init.d/hadoop-jobtracker
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/init.d/hadoop-jobtracker?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/init.d/hadoop-jobtracker (added)
+++ hadoop/common/trunk/src/packages/deb/init.d/hadoop-jobtracker Fri May 27 16:35:02 2011
@@ -0,0 +1,142 @@
+#! /bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+### BEGIN INIT INFO
+# Provides:		hadoop-jobtracker	
+# Required-Start:	$remote_fs $syslog
+# Required-Stop:	$remote_fs $syslog
+# Default-Start:	2 3 4 5
+# Default-Stop:		
+# Short-Description:	Apache Hadoop Job Tracker server
+### END INIT INFO
+
+set -e
+
+# /etc/init.d/hadoop-jobtracker: start and stop the Apache Hadoop Job Tracker daemon
+
+test -x /usr/bin/hadoop || exit 0
+( /usr/bin/hadoop 2>&1 | grep -q hadoop ) 2>/dev/null || exit 0
+
+umask 022
+
+if test -f /etc/default/hadoop-env.sh; then
+    . /etc/default/hadoop-env.sh
+fi
+
+. /lib/lsb/init-functions
+
+# Are we running from init?
+run_by_init() {
+    ([ "$previous" ] && [ "$runlevel" ]) || [ "$runlevel" = S ]
+}
+
+check_for_no_start() {
+    # forget it if we're trying to start, and /etc/hadoop/hadoop-jobtracker_not_to_be_run exists
+    if [ -e /etc/hadoop/hadoop-jobtracker_not_to_be_run ]; then 
+	if [ "$1" = log_end_msg ]; then
+	    log_end_msg 0
+	fi
+	if ! run_by_init; then
+	    log_action_msg "Apache Hadoop Job Tracker server not in use (/etc/hadoop/hadoop-jobtracker_not_to_be_run)"
+	fi
+	exit 0
+    fi
+}
+
+check_privsep_dir() {
+    # Create the PID directory if necessary
+    if [ ! -d ${HADOOP_PID_DIR} ]; then
+	mkdir -p ${HADOOP_PID_DIR}
+        chown root:hadoop ${HADOOP_PID_DIR}
+	chmod 0775 ${HADOOP_PID_DIR} 
+    fi
+}
+
+export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+
+case "$1" in
+  start)
+	check_privsep_dir
+	check_for_no_start
+	log_daemon_msg "Starting Apache Hadoop Job Tracker server" "hadoop-jobtracker"
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start jobtracker; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+  stop)
+	log_daemon_msg "Stopping Apache Hadoop Job Tracker server" "hadoop-jobtracker"
+	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Job Tracker server" "hadoop-jobtracker"
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid
+	check_for_no_start log_end_msg
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start jobtracker; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  try-restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Job Tracker server" "hadoop-jobtracker"
+	set +e
+	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid
+	RET="$?"
+	set -e
+	case $RET in
+	    0)
+		# old daemon stopped
+		check_for_no_start log_end_msg
+		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start jobtracker; then
+		    log_end_msg 0
+		else
+		    log_end_msg 1
+		fi
+		;;
+	    1)
+		# daemon not running
+		log_progress_msg "(not running)"
+		log_end_msg 0
+		;;
+	    *)
+		# failed to stop
+		log_progress_msg "(failed to stop)"
+		log_end_msg 1
+		;;
+	esac
+	;;
+
+  status)
+	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid ${JAVA_HOME}/bin/java hadoop-jobtracker && exit 0 || exit $?
+	;;
+
+  *)
+	log_action_msg "Usage: /etc/init.d/hadoop-jobtracker {start|stop|restart|try-restart|status}"
+	exit 1
+esac
+
+exit 0
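
The check_for_no_start hook above follows the usual Debian disable-file convention. A minimal sketch of how an administrator would exercise it, using the path hard-coded in the script:

    # Disable automatic startup of the jobtracker:
    touch /etc/hadoop/hadoop-jobtracker_not_to_be_run
    /etc/init.d/hadoop-jobtracker start    # logs "not in use" and exits 0

    # Re-enable it:
    rm -f /etc/hadoop/hadoop-jobtracker_not_to_be_run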

Added: hadoop/common/trunk/src/packages/deb/init.d/hadoop-namenode
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/init.d/hadoop-namenode?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/init.d/hadoop-namenode (added)
+++ hadoop/common/trunk/src/packages/deb/init.d/hadoop-namenode Fri May 27 16:35:02 2011
@@ -0,0 +1,154 @@
+#! /bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+### BEGIN INIT INFO
+# Provides:		hadoop-namenode	
+# Required-Start:	$remote_fs $syslog
+# Required-Stop:	$remote_fs $syslog
+# Default-Start:	2 3 4 5
+# Default-Stop:		
+# Short-Description:	Apache Hadoop Name Node server
+### END INIT INFO
+
+set -e
+
+# /etc/init.d/hadoop-namenode: start and stop the Apache Hadoop Name Node daemon
+
+test -x /usr/bin/hadoop || exit 0
+( /usr/bin/hadoop 2>&1 | grep -q hadoop ) 2>/dev/null || exit 0
+
+umask 022
+
+if test -f /etc/default/hadoop-env.sh; then
+    . /etc/default/hadoop-env.sh
+fi
+
+. /lib/lsb/init-functions
+
+# Are we running from init?
+run_by_init() {
+    ([ "$previous" ] && [ "$runlevel" ]) || [ "$runlevel" = S ]
+}
+
+check_for_no_start() {
+    # forget it if we're trying to start, and /etc/hadoop/hadoop-namenode_not_to_be_run exists
+    if [ -e /etc/hadoop/hadoop-namenode_not_to_be_run ]; then 
+	if [ "$1" = log_end_msg ]; then
+	    log_end_msg 0
+	fi
+	if ! run_by_init; then
+	    log_action_msg "Apache Hadoop Name Node server not in use (/etc/hadoop/hadoop-namenode_not_to_be_run)"
+	fi
+	exit 0
+    fi
+}
+
+check_privsep_dir() {
+    # Create the PID directory if necessary
+    if [ ! -d ${HADOOP_PID_DIR} ]; then
+	mkdir -p ${HADOOP_PID_DIR}
+        chown root:hadoop ${HADOOP_PID_DIR}
+	chmod 0775 ${HADOOP_PID_DIR} 
+    fi
+}
+
+format() {
+    su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} namenode -format' hdfs
+}
+
+export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+
+case "$1" in
+  start)
+	check_privsep_dir
+	check_for_no_start
+	log_daemon_msg "Starting Apache Hadoop Name Node server" "hadoop-namenode"
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start namenode; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+  stop)
+	log_daemon_msg "Stopping Apache Hadoop Name Node server" "hadoop-namenode"
+	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+  format)
+	log_daemon_msg "Formatting Apache Hadoop Name Node" "hadoop-namenode"
+	if format; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+  restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Name Node server" "hadoop-namenode"
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid
+	check_for_no_start log_end_msg
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start namenode; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  try-restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Name Node server" "hadoop-namenode"
+	set +e
+	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid
+	RET="$?"
+	set -e
+	case $RET in
+	    0)
+		# old daemon stopped
+		check_for_no_start log_end_msg
+		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start namenode; then
+		    log_end_msg 0
+		else
+		    log_end_msg 1
+		fi
+		;;
+	    1)
+		# daemon not running
+		log_progress_msg "(not running)"
+		log_end_msg 0
+		;;
+	    *)
+		# failed to stop
+		log_progress_msg "(failed to stop)"
+		log_end_msg 1
+		;;
+	esac
+	;;
+
+  status)
+	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid ${JAVA_HOME}/bin/java hadoop-namenode && exit 0 || exit $?
+	;;
+
+  *)
+	log_action_msg "Usage: /etc/init.d/hadoop-namenode {start|stop|restart|try-restart|status}"
+	exit 1
+esac
+
+exit 0
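
Unlike the jobtracker and tasktracker scripts, this one adds a format action that runs `hadoop namenode -format` as the hdfs user. A sketch of a typical first-boot sequence (it assumes /etc/default/hadoop-env.sh supplies HADOOP_PREFIX, HADOOP_CONF_DIR and HADOOP_PID_DIR, as the script expects):

    /etc/init.d/hadoop-namenode format    # one time: format name node storage as user hdfs
    /etc/init.d/hadoop-namenode start
    /etc/init.d/hadoop-namenode status    # LSB status, keyed off the pid file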

Added: hadoop/common/trunk/src/packages/deb/init.d/hadoop-tasktracker
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/init.d/hadoop-tasktracker?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/init.d/hadoop-tasktracker (added)
+++ hadoop/common/trunk/src/packages/deb/init.d/hadoop-tasktracker Fri May 27 16:35:02 2011
@@ -0,0 +1,142 @@
+#! /bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+### BEGIN INIT INFO
+# Provides:		hadoop-tasktracker	
+# Required-Start:	$remote_fs $syslog
+# Required-Stop:	$remote_fs $syslog
+# Default-Start:	2 3 4 5
+# Default-Stop:		
+# Short-Description:	Apache Hadoop Task Tracker server
+### END INIT INFO
+
+set -e
+
+# /etc/init.d/hadoop-tasktracker: start and stop the Apache Hadoop Task Tracker daemon
+
+test -x /usr/bin/hadoop || exit 0
+( /usr/bin/hadoop 2>&1 | grep -q hadoop ) 2>/dev/null || exit 0
+
+umask 022
+
+if test -f /etc/default/hadoop-env.sh; then
+    . /etc/default/hadoop-env.sh
+fi
+
+. /lib/lsb/init-functions
+
+# Are we running from init?
+run_by_init() {
+    ([ "$previous" ] && [ "$runlevel" ]) || [ "$runlevel" = S ]
+}
+
+check_for_no_start() {
+    # forget it if we're trying to start, and /etc/hadoop/hadoop-tasktracker_not_to_be_run exists
+    if [ -e /etc/hadoop/hadoop-tasktracker_not_to_be_run ]; then 
+	if [ "$1" = log_end_msg ]; then
+	    log_end_msg 0
+	fi
+	if ! run_by_init; then
+	    log_action_msg "Apache Hadoop Task Tracker server not in use (/etc/hadoop/hadoop-tasktracker_not_to_be_run)"
+	fi
+	exit 0
+    fi
+}
+
+check_privsep_dir() {
+    # Create the PID directory if necessary
+    if [ ! -d ${HADOOP_PID_DIR} ]; then
+	mkdir -p ${HADOOP_PID_DIR}
+        chown root:hadoop ${HADOOP_PID_DIR}
+	chmod 0775 ${HADOOP_PID_DIR} 
+    fi
+}
+
+export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+
+case "$1" in
+  start)
+	check_privsep_dir
+	check_for_no_start
+	log_daemon_msg "Starting Apache Hadoop Task Tracker server" "hadoop-tasktracker"
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start tasktracker; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+  stop)
+	log_daemon_msg "Stopping Apache Hadoop Task Tracker server" "hadoop-tasktracker"
+	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Task Tracker server" "hadoop-tasktracker"
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid
+	check_for_no_start log_end_msg
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start tasktracker; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  try-restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Task Tracker server" "hadoop-tasktracker"
+	set +e
+	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid
+	RET="$?"
+	set -e
+	case $RET in
+	    0)
+		# old daemon stopped
+		check_for_no_start log_end_msg
+		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start tasktracker; then
+		    log_end_msg 0
+		else
+		    log_end_msg 1
+		fi
+		;;
+	    1)
+		# daemon not running
+		log_progress_msg "(not running)"
+		log_end_msg 0
+		;;
+	    *)
+		# failed to stop
+		log_progress_msg "(failed to stop)"
+		log_end_msg 1
+		;;
+	esac
+	;;
+
+  status)
+	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid ${JAVA_HOME}/bin/java hadoop-tasktracker && exit 0 || exit $?
+	;;
+
+  *)
+	log_action_msg "Usage: /etc/init.d/hadoop-tasktracker {start|stop|restart|try-restart|status}"
+	exit 1
+esac
+
+exit 0

Added: hadoop/common/trunk/src/packages/hadoop-create-user.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/hadoop-create-user.sh?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/hadoop-create-user.sh (added)
+++ hadoop/common/trunk/src/packages/hadoop-create-user.sh Fri May 27 16:35:02 2011
@@ -0,0 +1,76 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+if [ "$HADOOP_HOME" != "" ]; then
+  echo "Warning: \$HADOOP_HOME is deprecated."
+  echo
+fi
+
+. "$bin"/../libexec/hadoop-config.sh
+
+usage() {
+  echo "
+usage: $0 <parameters>
+  Required parameter:
+     -u <username>                                 Create user on HDFS
+  Optional parameters:
+     -h                                            Display this message
+  "
+  exit 1
+}
+
+# Parse script parameters
+if [ $# != 2 ] ; then
+    usage
+    exit 1
+fi
+
+while getopts "hu:" OPTION
+do
+  case $OPTION in
+    u)
+      SETUP_USER=$OPTARG
+      ;;
+    h)
+      usage
+      ;;
+    *)
+      echo "Unknown option: -$OPTARG"
+      usage
+      ;;
+  esac
+done
+
+# Create user directory on HDFS
+export SETUP_USER
+export SETUP_PATH=/user/${SETUP_USER}
+export HADOOP_PREFIX
+export HADOOP_CONF_DIR
+
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir ${SETUP_PATH}' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown ${SETUP_USER}:${SETUP_USER} ${SETUP_PATH}' hdfs
+
+if [ "$?" == "0" ]; then
+  echo "User directory has been setup: ${SETUP_PATH}"
+fi
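
Example invocation (a sketch; the username alice is hypothetical, and the caller needs privilege to su to hdfs):

    hadoop-create-user.sh -u alice
    # creates /user/alice on HDFS and chowns it to alice:alice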

Added: hadoop/common/trunk/src/packages/hadoop-setup-conf.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/hadoop-setup-conf.sh?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/hadoop-setup-conf.sh (added)
+++ hadoop/common/trunk/src/packages/hadoop-setup-conf.sh Fri May 27 16:35:02 2011
@@ -0,0 +1,282 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+if [ "$HADOOP_HOME" != "" ]; then
+  echo "Warning: \$HADOOP_HOME is deprecated."
+  echo
+fi
+
+. "$bin"/../libexec/hadoop-config.sh
+
+usage() {
+  echo "
+usage: $0 <parameters>
+
+  Optional parameters:
+     --auto                                                          Setup automatically
+     --default                                                       Generate default config
+     --conf-dir=/etc/hadoop                                          Set config directory
+     --datanode-dir=/var/lib/hadoop/hdfs/datanode                    Set datanode directory
+     -h                                                              Display this message
+     --jobtracker-url=hostname:9001                                  Set jobtracker url
+     --log-dir=/var/log/hadoop                                       Set log directory
+     --hdfs-dir=/var/lib/hadoop/hdfs                                 Set hdfs directory
+     --mapred-dir=/var/lib/hadoop/mapred                             Set mapreduce directory
+     --namenode-dir=/var/lib/hadoop/hdfs/namenode                    Set namenode directory
+     --namenode-url=hdfs://hostname:9000/                            Set namenode url
+     --replication=3                                                 Set replication factor
+     --taskscheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler  Set task scheduler
+  "
+  exit 1
+}
+
+template_generator() {
+  REGEX='(\$\{[a-zA-Z_][a-zA-Z_0-9]*\})'
+  # Read the template line by line, preserving whitespace, and expand
+  # every ${VAR} reference from the environment.
+  while IFS= read -r line ; do
+    while [[ "$line" =~ $REGEX ]] ; do
+      LHS=${BASH_REMATCH[1]}
+      RHS="$(eval echo "\"$LHS\"")"
+      line=${line//$LHS/$RHS}
+    done
+    echo "$line" >> $2
+  done < $1
+}
+
+OPTS=$(getopt \
+  -n $0 \
+  -o '' \
+  -l 'auto' \
+  -l 'conf-dir:' \
+  -l 'default' \
+  -l 'hdfs-dir:' \
+  -l 'namenode-dir:' \
+  -l 'datanode-dir:' \
+  -l 'mapred-dir:' \
+  -l 'namenode-url:' \
+  -l 'jobtracker-url:' \
+  -l 'log-dir:' \
+  -l 'replication:' \
+  -l 'taskscheduler:' \
+  -o 'h' \
+  -- "$@") 
+  
+if [ $? != 0 ] ; then
+    usage
+fi
+
+# Make sure the HADOOP_LOG_DIR is not picked up from user environment.
+unset HADOOP_LOG_DIR
+
+# Parse script parameters  
+eval set -- "${OPTS}"
+while true ; do
+  case "$1" in
+    --auto)
+      AUTOSETUP=1
+      AUTOMATED=1
+      shift
+      ;; 
+    --conf-dir)
+      HADOOP_CONF_DIR=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --default)
+      AUTOMATED=1; shift
+      ;;
+    -h)
+      usage
+      ;; 
+    --hdfs-dir)
+      HADOOP_HDFS_DIR=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --namenode-dir)
+      HADOOP_NN_DIR=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --datanode-dir)
+      HADOOP_DN_DIR=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --mapred-dir)
+      HADOOP_MAPRED_DIR=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --namenode-url)
+      HADOOP_NN_HOST=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --jobtracker-url)
+      HADOOP_JT_HOST=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --log-dir)
+      HADOOP_LOG_DIR=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --replication)
+      HADOOP_REPLICATION=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --taskscheduler)
+      HADOOP_TASK_SCHEDULER=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --)
+      shift ; break
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      exit 1 
+      ;;
+  esac
+done 
+
+# Fill in default values, if parameters have not been defined.
+AUTOSETUP=${AUTOSETUP:-1}
+JAVA_HOME=${JAVA_HOME:-/usr/java/default}
+HADOOP_NN_HOST=${HADOOP_NN_HOST:-hdfs://`hostname`:9000/}
+HADOOP_NN_DIR=${HADOOP_NN_DIR:-/var/lib/hadoop/hdfs/namenode}
+HADOOP_DN_DIR=${HADOOP_DN_DIR:-/var/lib/hadoop/hdfs/datanode}
+HADOOP_JT_HOST=${HADOOP_JT_HOST:-`hostname`:9001}
+HADOOP_HDFS_DIR=${HADOOP_HDFS_DIR:-/var/lib/hadoop/hdfs}
+HADOOP_MAPRED_DIR=${HADOOP_MAPRED_DIR:-/var/lib/hadoop/mapred}
+HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-/var/log/hadoop}
+HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop}
+HADOOP_REPLICATION=${HADOOP_REPLICATION:-3}
+HADOOP_TASK_SCHEDULER=${HADOOP_TASK_SCHEDULER:-org.apache.hadoop.mapred.JobQueueTaskScheduler}
+
+# Interactive setup wizard
+if [ "${AUTOMATED}" != "1" ]; then
+  echo "Setup Hadoop Configuration"
+  echo
+  echo -n "Where would you like to put config directory? (${HADOOP_CONF_DIR}) "
+  read USER_HADOOP_CONF_DIR
+  echo -n "Where would you like to put log directory? (${HADOOP_LOG_DIR}) "
+  read USER_HADOOP_LOG_DIR
+  echo -n "What is the url of the namenode? (${HADOOP_NN_HOST}) "
+  read USER_HADOOP_NN_HOST
+  echo -n "Where would you like to put namenode data directory? (${HADOOP_NN_DIR}) "
+  read USER_HADOOP_NN_DIR
+  echo -n "Where would you like to put datanode data directory? (${HADOOP_DN_DIR}) "
+  read USER_HADOOP_DN_DIR
+  echo -n "What is the url of the jobtracker? (${HADOOP_JT_HOST}) "
+  read USER_HADOOP_JT_HOST
+  echo -n "Where would you like to put jobtracker/tasktracker data directory? (${HADOOP_MAPRED_DIR}) "
+  read USER_HADOOP_MAPRED_DIR
+  echo -n "Which taskscheduler would you like? (${HADOOP_TASK_SCHEDULER}) "
+  read USER_HADOOP_TASK_SCHEDULER
+  echo -n "Where is JAVA_HOME directory? (${JAVA_HOME}) "
+  read USER_JAVA_HOME
+  echo -n "Would you like to create directories/copy conf files to localhost? (Y/n) "
+  read USER_AUTOSETUP
+  echo
+  JAVA_HOME=${USER_JAVA_HOME:-$JAVA_HOME}
+  HADOOP_NN_HOST=${USER_HADOOP_NN_HOST:-$HADOOP_NN_HOST}
+  HADOOP_NN_DIR=${USER_HADOOP_NN_DIR:-$HADOOP_NN_DIR}
+  HADOOP_DN_DIR=${USER_HADOOP_DN_DIR:-$HADOOP_DN_DIR}
+  HADOOP_JT_HOST=${USER_HADOOP_JT_HOST:-$HADOOP_JT_HOST}
+  HADOOP_HDFS_DIR=${USER_HADOOP_HDFS_DIR:-$HADOOP_HDFS_DIR}
+  HADOOP_MAPRED_DIR=${USER_HADOOP_MAPRED_DIR:-$HADOOP_MAPRED_DIR}
+  HADOOP_TASK_SCHEDULER=${USER_HADOOP_TASK_SCHEDULER:-$HADOOP_TASK_SCHEDULER}
+  HADOOP_LOG_DIR=${USER_HADOOP_LOG_DIR:-$HADOOP_LOG_DIR}
+  HADOOP_CONF_DIR=${USER_HADOOP_CONF_DIR:-$HADOOP_CONF_DIR}
+  AUTOSETUP=${USER_AUTOSETUP:-y}
+  echo "Review your choices:"
+  echo
+  echo "Config directory            : ${HADOOP_CONF_DIR}"
+  echo "Log directory               : ${HADOOP_LOG_DIR}"
+  echo "Namenode url                : ${HADOOP_NN_HOST}"
+  echo "Namenode directory          : ${HADOOP_NN_DIR}"
+  echo "Datanode directory          : ${HADOOP_DN_DIR}"
+  echo "Jobtracker url              : ${HADOOP_JT_HOST}"
+  echo "Mapreduce directory         : ${HADOOP_MAPRED_DIR}"
+  echo "Task scheduler              : ${HADOOP_TASK_SCHEDULER}"
+  echo "JAVA_HOME directory         : ${JAVA_HOME}"
+  echo "Create dirs/copy conf files : ${AUTOSETUP}"
+  echo
+  echo -n "Proceed with generate configuration? (y/N) "
+  read CONFIRM
+  if [ "${CONFIRM}" != "y" ]; then
+    echo "User aborted setup, exiting..."
+    exit 1
+  fi
+fi
+
+if [ "${AUTOSETUP}" == "1" ]; then
+  # If user wants to setup local system automatically,
+  # set config file generation location to HADOOP_CONF_DIR.
+  DEST=${HADOOP_CONF_DIR}
+else
+  # If the user only wants to generate config files locally,
+  # place them in the current working directory.
+  DEST=`pwd`
+fi
+
+# Remove existing config files from the destination directory, if present.
+rm -f ${DEST}/core-site.xml >/dev/null
+rm -f ${DEST}/hdfs-site.xml >/dev/null
+rm -f ${DEST}/mapred-site.xml >/dev/null
+rm -f ${DEST}/hadoop-env.sh >/dev/null
+
+# Generate config file with specified parameters.
+template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/core-site.xml ${DEST}/core-site.xml
+template_generator ${HADOOP_PREFIX}/share/hadoop/hdfs/templates/hdfs-site.xml ${DEST}/hdfs-site.xml
+template_generator ${HADOOP_PREFIX}/share/hadoop/mapreduce/templates/mapred-site.xml ${DEST}/mapred-site.xml
+template_generator ${HADOOP_CONF_DIR}/hadoop-env.sh.template ${DEST}/hadoop-env.sh
+
+chown root:hadoop ${DEST}/hadoop-env.sh
+chmod 755 ${DEST}/hadoop-env.sh
+
+# Setup directory path and copy config files, if AUTOSETUP is chosen.
+if [ "${AUTOSETUP}" == "1" -o "${AUTOSETUP}" == "y" ]; then
+  mkdir -p ${HADOOP_HDFS_DIR}
+  mkdir -p ${HADOOP_NN_DIR}
+  mkdir -p ${HADOOP_DN_DIR}
+  mkdir -p ${HADOOP_MAPRED_DIR}
+  mkdir -p ${HADOOP_CONF_DIR}
+  mkdir -p ${HADOOP_LOG_DIR}
+  mkdir -p ${HADOOP_LOG_DIR}/hdfs
+  mkdir -p ${HADOOP_LOG_DIR}/mapred
+  chown hdfs:hadoop ${HADOOP_HDFS_DIR}
+  chown hdfs:hadoop ${HADOOP_NN_DIR}
+  chown hdfs:hadoop ${HADOOP_DN_DIR}
+  chown mapred:hadoop ${HADOOP_MAPRED_DIR}
+  chown root:hadoop ${HADOOP_LOG_DIR}
+  chmod 775 ${HADOOP_LOG_DIR}
+  chown hdfs:hadoop ${HADOOP_LOG_DIR}/hdfs
+  chown mapred:hadoop ${HADOOP_LOG_DIR}/mapred
+  echo "Configuration setup is completed."
+  if [[ "$HADOOP_NN_HOST" =~ "`hostname`" ]]; then
+    echo "Proceed to run hadoop-setup-hdfs.sh on namenode."
+  fi
+else
+  echo
+  echo "Configuration file has been generated, please copy:"
+  echo
+  echo "core-site.xml"
+  echo "hdfs-site.xml"
+  echo "mapred-site.xml"
+  echo "hadoop-env.sh"
+  echo
+  echo " to ${HADOOP_CONF_DIR} on all nodes, and proceed to run hadoop-setup-hdfs.sh on namenode."
+fi
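
The template_generator function above does all of the config generation: it scans each template line for ${VAR} references and substitutes them from the caller's environment via eval. A self-contained sketch of the mechanism (the file names and variable value are made up for illustration):

    export HADOOP_NN_HOST=hdfs://namenode.example.com:9000/
    echo '<value>${HADOOP_NN_HOST}</value>' > /tmp/demo.template
    template_generator /tmp/demo.template /tmp/demo.out
    cat /tmp/demo.out
    # <value>hdfs://namenode.example.com:9000/</value>

Because the function appends to its output file, the script removes the destination files before regenerating them.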

Added: hadoop/common/trunk/src/packages/hadoop-setup-hdfs.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/hadoop-setup-hdfs.sh?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/hadoop-setup-hdfs.sh (added)
+++ hadoop/common/trunk/src/packages/hadoop-setup-hdfs.sh Fri May 27 16:35:02 2011
@@ -0,0 +1,96 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+if [ "$HADOOP_HOME" != "" ]; then
+  echo "Warning: \$HADOOP_HOME is deprecated."
+  echo
+fi
+
+. "$bin"/../libexec/hadoop-config.sh
+
+usage() {
+  echo "
+usage: $0 <parameters>
+  Required parameter:
+     -c <clusterid>                                Set cluster identifier for HDFS
+  Optional parameters:
+     -h                                            Display this message
+  "
+  exit 1
+}
+
+if [ $# != 2 ] ; then
+    usage
+    exit 1
+fi
+
+while getopts "hc:" OPTION
+do
+  case $OPTION in
+    c)
+      SETUP_CLUSTER=$OPTARG
+      ;;
+    h)
+      usage
+      ;;
+    *)
+      echo "Unknown option: -$OPTARG"
+      usage
+      ;;
+  esac
+done
+
+export HADOOP_PREFIX
+export HADOOP_CONF_DIR
+export SETUP_CLUSTER
+
+# Start namenode and initialize file system structure
+echo "Setup Hadoop Distributed File System"
+echo
+echo "Formatting namenode"
+echo
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} namenode -format -clusterid ${SETUP_CLUSTER}' hdfs
+echo
+echo "Starting namenode process"
+echo
+/etc/init.d/hadoop-namenode start
+echo
+echo "Initialize HDFS file system: "
+echo
+
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /jobtracker' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown mapred:mapred /jobtracker' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown mapred:mapred /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /tmp' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chmod 777 /tmp' hdfs
+
+if [ $? -eq 0 ]; then
+  echo "Completed."
+else
+  echo "Unknown error occurred, check hadoop logs for details."
+fi
+
+echo
+echo "Please startup datanode processes: /etc/init.d/hadoop-datanode start"

Added: hadoop/common/trunk/src/packages/hadoop-setup-single-node.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/hadoop-setup-single-node.sh?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/hadoop-setup-single-node.sh (added)
+++ hadoop/common/trunk/src/packages/hadoop-setup-single-node.sh Fri May 27 16:35:02 2011
@@ -0,0 +1,212 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Script to set up the HDFS file system for a single node deployment
+
+bin=`which $0`
+bin=`dirname ${bin}`
+bin=`cd "$bin"; pwd`
+
+export HADOOP_PREFIX=${bin}/..
+
+if [ -e /etc/hadoop/hadoop-env.sh ]; then
+  . /etc/hadoop/hadoop-env.sh
+fi
+
+usage() {
+  echo "
+usage: $0 <parameters>
+
+  Optional parameters:
+     --default                   Setup system as default
+     -h                          Display this message
+  "
+  exit 1
+}
+
+# Parse script parameters
+OPTS=$(getopt \
+  -n $0 \
+  -o 'h' \
+  -l 'default' \
+  -- "$@")
+
+if [ $? != 0 ] ; then
+    usage
+fi
+
+eval set -- "${OPTS}"
+while true ; do
+  case "$1" in
+    --default)
+      AUTOMATED=1; shift
+      ;;
+    -h)
+      usage
+      ;;
+    --)
+      shift ; break
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      exit 1
+      ;;
+  esac
+done
+
+# Interactive setup wizard
+if [ "${AUTOMATED}" != "1" ]; then
+  echo "Welcome to Hadoop single node setup wizard"
+  echo
+  echo -n "Would you like to use default single node configuration? (y/n) "
+  read SET_CONFIG
+  echo -n "Would you like to format name node? (y/n) "
+  read SET_FORMAT
+  echo -n "Would you like to setup default directory structure? (y/n) "
+  read SET_MKDIR
+  echo -n "Would you like to start up Hadoop? (y/n) "
+  read STARTUP
+  echo -n "Would you like to start up Hadoop on reboot? (y/n) "
+  read SET_REBOOT
+  echo
+  echo "Review your choices:"
+  echo
+  echo "Setup single node configuration    : ${SET_CONFIG}"
+  echo "Format namenode                    : ${SET_FORMAT}"
+  echo "Setup default file system structure: ${SET_MKDIR}"
+  echo "Start up Hadoop                    : ${STARTUP}"
+  echo "Start up Hadoop on reboot          : ${SET_REBOOT}"
+  echo
+  echo -n "Proceed with setup? (y/n) "
+  read CONFIRM
+  if [ "${CONFIRM}" != "y" ]; then
+    echo "User aborted setup, exiting..."
+    exit 1
+  fi
+else
+  SET_CONFIG="y"
+  SET_FORMAT="y"
+  SET_MKDIR="y"
+  STARTUP="y"
+  SET_REBOOT="y"
+fi
+
+AUTOMATED=${AUTOMATED:-0}
+SET_CONFIG=${SET_CONFIG:-y}
+SET_FORMAT=${SET_FORMAT:-n}
+SET_MKDIR=${SET_MKDIR:-y}
+STARTUP=${STARTUP:-y}
+SET_REBOOT=${SET_REBOOT:-y}
+
+# Make sure system is not already started
+/etc/init.d/hadoop-namenode stop 2>/dev/null >/dev/null
+/etc/init.d/hadoop-datanode stop 2>/dev/null >/dev/null
+/etc/init.d/hadoop-jobtracker stop 2>/dev/null >/dev/null
+/etc/init.d/hadoop-tasktracker stop 2>/dev/null >/dev/null
+
+# Default settings
+JAVA_HOME=${JAVA_HOME:-/usr/java/default}
+HADOOP_NN_HOST=${HADOOP_NN_HOST:-hdfs://localhost:9000/}
+HADOOP_NN_DIR=${HADOOP_NN_DIR:-/var/lib/hadoop/hdfs/namenode}
+HADOOP_DN_DIR=${HADOOP_DN_DIR:-/var/lib/hadoop/hdfs/datanode}
+HADOOP_JT_HOST=${HADOOP_JT_HOST:-localhost:9001}
+HADOOP_HDFS_DIR=${HADOOP_HDFS_DIR:-/var/lib/hadoop/hdfs}
+HADOOP_MAPRED_DIR=${HADOOP_MAPRED_DIR:-/var/lib/hadoop/mapred}
+HADOOP_LOG_DIR="/var/log/hadoop"
+HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop}
+HADOOP_REPLICATION=${HADOOP_REPLICATION:-1}
+HADOOP_TASK_SCHEDULER=${HADOOP_TASK_SCHEDULER:-org.apache.hadoop.mapred.JobQueueTaskScheduler}
+
+# Setup config files
+if [ "${SET_CONFIG}" == "y" ]; then
+  ${HADOOP_PREFIX}/sbin/hadoop-setup-conf.sh --auto \
+    --conf-dir=${HADOOP_CONF_DIR} \
+    --datanode-dir=${HADOOP_DN_DIR} \
+    --hdfs-dir=${HADOOP_HDFS_DIR} \
+    --jobtracker-url=${HADOOP_JT_HOST} \
+    --log-dir=${HADOOP_LOG_DIR} \
+    --mapred-dir=${HADOOP_MAPRED_DIR} \
+    --namenode-dir=${HADOOP_NN_DIR} \
+    --namenode-url=${HADOOP_NN_HOST} \
+    --replication=${HADOOP_REPLICATION}
+fi
+
+export HADOOP_CONF_DIR
+
+# Format namenode
+if [ ! -e ${HADOOP_NN_DIR} ]; then
+  rm -rf ${HADOOP_HDFS_DIR} 2>/dev/null >/dev/null
+  mkdir -p ${HADOOP_HDFS_DIR}
+  chmod 755 ${HADOOP_HDFS_DIR}
+  chown hdfs:hadoop ${HADOOP_HDFS_DIR}
+  su -c '${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} namenode -format -clusterid hadoop' hdfs
+elif [ "${SET_FORMAT}" == "y" ]; then
+  rm -rf ${HADOOP_HDFS_DIR} 2>/dev/null >/dev/null
+  mkdir -p ${HADOOP_HDFS_DIR}
+  chmod 755 ${HADOOP_HDFS_DIR}
+  chown hdfs:hadoop ${HADOOP_HDFS_DIR}
+  rm -rf /var/lib/hadoop/hdfs/namenode
+  su -c '${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} namenode -format -clusterid hadoop' hdfs
+fi
+
+# Start hdfs service
+/etc/init.d/hadoop-namenode start
+/etc/init.d/hadoop-datanode start
+
+# Initialize file system structure
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown mapred:mapred /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /tmp' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chmod 777 /tmp' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /jobtracker' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown mapred:mapred /jobtracker' hdfs
+
+# Start mapreduce service
+/etc/init.d/hadoop-jobtracker start
+/etc/init.d/hadoop-tasktracker start
+
+# Toggle service startup on reboot
+if [ "${SET_REBOOT}" == "y" ]; then
+  if [ -e /etc/debian_version ]; then
+    ln -sf ../init.d/hadoop-namenode /etc/rc2.d/S90hadoop-namenode
+    ln -sf ../init.d/hadoop-datanode /etc/rc2.d/S91hadoop-datanode
+    ln -sf ../init.d/hadoop-jobtracker /etc/rc2.d/S92hadoop-jobtracker
+    ln -sf ../init.d/hadoop-tasktracker /etc/rc2.d/S93hadoop-tasktracker
+    ln -sf ../init.d/hadoop-namenode /etc/rc6.d/S10hadoop-namenode
+    ln -sf ../init.d/hadoop-datanode /etc/rc6.d/S11hadoop-datanode
+    ln -sf ../init.d/hadoop-jobtracker /etc/rc6.d/S12hadoop-jobtracker
+    ln -sf ../init.d/hadoop-tasktracker /etc/rc6.d/S13hadoop-tasktracker
+  elif [ -e /etc/redhat-release ]; then
+    /sbin/chkconfig hadoop-namenode --add
+    /sbin/chkconfig hadoop-datanode --add
+    /sbin/chkconfig hadoop-jobtracker --add
+    /sbin/chkconfig hadoop-tasktracker --add
+    /sbin/chkconfig hadoop-namenode on
+    /sbin/chkconfig hadoop-datanode on
+    /sbin/chkconfig hadoop-jobtracker on
+    /sbin/chkconfig hadoop-tasktracker on
+  fi
+fi
+
+# Shut down services, if the user chose to stop them after setup
+if [ "${STARTUP}" != "y" ]; then
+  /etc/init.d/hadoop-namenode stop
+  /etc/init.d/hadoop-datanode stop
+  /etc/init.d/hadoop-jobtracker stop
+  /etc/init.d/hadoop-tasktracker stop
+fi
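
For an unattended install, the interactive wizard can be skipped entirely (a sketch; it assumes the script was installed into ${HADOOP_PREFIX}/sbin along with hadoop-setup-conf.sh):

    ${HADOOP_PREFIX}/sbin/hadoop-setup-single-node.sh --default
    # answers "y" to every prompt: generates the config, formats the
    # namenode, starts all four daemons and registers them for reboot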

Added: hadoop/common/trunk/src/packages/rpm/init.d/hadoop-datanode
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/rpm/init.d/hadoop-datanode?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/rpm/init.d/hadoop-datanode (added)
+++ hadoop/common/trunk/src/packages/rpm/init.d/hadoop-datanode Fri May 27 16:35:02 2011
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop datanode
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop datanode
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid"
+desc="Hadoop datanode daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-datanode): "
+  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start datanode
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-datanode
+  return $RETVAL
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-datanode): "
+  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop datanode
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-datanode $PIDFILE
+}
+
+restart() {
+  stop
+  start
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-datanode ] && restart || :
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|condrestart}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/common/trunk/src/packages/rpm/init.d/hadoop-jobtracker
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/rpm/init.d/hadoop-jobtracker?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/rpm/init.d/hadoop-jobtracker (added)
+++ hadoop/common/trunk/src/packages/rpm/init.d/hadoop-jobtracker Fri May 27 16:35:02 2011
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop jobtracker
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop jobtracker
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid"
+desc="Hadoop jobtracker daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-jobtracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start jobtracker
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-jobtracker
+  return $RETVAL
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-jobtracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop jobtracker
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-jobtracker $PIDFILE
+}
+
+restart() {
+  stop
+  start
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-jobtracker ] && restart || :
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|condrestart}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/common/trunk/src/packages/rpm/init.d/hadoop-namenode
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/rpm/init.d/hadoop-namenode?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/rpm/init.d/hadoop-namenode (added)
+++ hadoop/common/trunk/src/packages/rpm/init.d/hadoop-namenode Fri May 27 16:35:02 2011
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop namenode
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop namenode
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid"
+desc="Hadoop namenode daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-namenode): "
+  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start namenode $1
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-namenode
+  return $RETVAL
+}
+
+upgrade() {
+  start -upgrade
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-namenode): "
+  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop namenode
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-namenode $PIDFILE
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+restart() {
+  stop
+  start
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-namenode ] && restart || :
+}
+
+format() {
+  daemon --user hdfs ${HADOOP_PREFIX}/bin/hadoop namenode -format
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  upgrade)
+    upgrade
+    ;;
+  format)
+    format
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart|try-restart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|try-restart|upgrade}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/common/trunk/src/packages/rpm/init.d/hadoop-tasktracker
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/rpm/init.d/hadoop-tasktracker?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/rpm/init.d/hadoop-tasktracker (added)
+++ hadoop/common/trunk/src/packages/rpm/init.d/hadoop-tasktracker Fri May 27 16:35:02 2011
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop tasktracker
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop tasktracker
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid"
+desc="Hadoop tasktracker daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-tasktracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start tasktracker
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-tasktracker
+  return $RETVAL
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-tasktracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop tasktracker
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-tasktracker $PIDFILE
+}
+
+restart() {
+  stop
+  start
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-tasktracker ] && restart || :
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|condrestart}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/common/trunk/src/packages/rpm/spec/hadoop.spec
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/rpm/spec/hadoop.spec?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/rpm/spec/hadoop.spec (added)
+++ hadoop/common/trunk/src/packages/rpm/spec/hadoop.spec Fri May 27 16:35:02 2011
@@ -0,0 +1,173 @@
+#   Licensed to the Apache Software Foundation (ASF) under one or more
+#   contributor license agreements.  See the NOTICE file distributed with
+#   this work for additional information regarding copyright ownership.
+#   The ASF licenses this file to You under the Apache License, Version 2.0
+#   (the "License"); you may not use this file except in compliance with
+#   the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+#
+# RPM Spec file for Hadoop version @version@
+#
+
+%define name         hadoop-common
+%define version      @version@
+%define release      @package.release@
+
+# Installation Locations
+%define _prefix      @package.prefix@
+%define _bin_dir     %{_prefix}/bin
+%define _conf_dir    @package.conf.dir@
+%define _lib_dir     %{_prefix}/lib
+%define _lib64_dir   %{_prefix}/lib64
+%define _libexec_dir %{_prefix}/libexec
+%define _log_dir     @package.log.dir@
+%define _pid_dir     @package.pid.dir@
+%define _sbin_dir    %{_prefix}/sbin
+%define _share_dir   %{_prefix}/share
+%define _var_dir     @package.var.dir@
+
+# Build time settings
+%define _build_dir  @package.build.dir@
+%define _final_name @final.name@
+%define debug_package %{nil}
+
+# Disable brp-java-repack-jars for AspectJ
+%define __os_install_post    \
+    /usr/lib/rpm/redhat/brp-compress \
+    %{!?__debug_package:/usr/lib/rpm/redhat/brp-strip %{__strip}} \
+    /usr/lib/rpm/redhat/brp-strip-static-archive %{__strip} \
+    /usr/lib/rpm/redhat/brp-strip-comment-note %{__strip} %{__objdump} \
+    /usr/lib/rpm/brp-python-bytecompile %{nil}
+
+# RPM searches perl files for dependencies, and this breaks for non-packaged
+# perl libs like thrift, so disable it
+%define _use_internal_dependency_generator 0
+
+%ifarch i386
+%global hadoop_arch Linux-i386-32
+%endif
+%ifarch amd64 x86_64
+%global hadoop_arch Linux-amd64-64
+%endif
+%ifarch noarch
+%global hadoop_arch ""
+%endif
+
+Summary: The Apache Hadoop project develops open-source software for reliable, scalable, distributed computing
+License: Apache License, Version 2.0
+URL: http://hadoop.apache.org/core/
+Vendor: Apache Software Foundation
+Group: Development/Libraries
+Name: %{name}
+Version: %{version}
+Release: %{release} 
+Source0: %{_final_name}-bin.tar.gz
+Prefix: %{_prefix}
+Prefix: %{_conf_dir}
+Prefix: %{_log_dir}
+Prefix: %{_pid_dir}
+Buildroot: %{_build_dir}
+Requires: sh-utils, textutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig, /sbin/service, jdk >= 1.6
+AutoReqProv: no
+Provides: hadoop
+
+%description
+The Apache Hadoop project develops open-source software for reliable, scalable, 
+distributed computing.  Hadoop includes these subprojects:
+
+Hadoop Common: The common utilities that support the other Hadoop subprojects.
+
+%prep
+%setup -n %{_final_name}
+
+%build
+if [ -d ${RPM_BUILD_DIR}%{_prefix} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_prefix}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_log_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_log_dir}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_conf_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_conf_dir}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_pid_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_pid_dir}
+fi
+
+mkdir -p ${RPM_BUILD_DIR}%{_prefix}
+mkdir -p ${RPM_BUILD_DIR}%{_bin_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_lib_dir}
+%ifarch amd64 x86_64
+mkdir -p ${RPM_BUILD_DIR}%{_lib64_dir}
+%endif
+mkdir -p ${RPM_BUILD_DIR}%{_libexec_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_log_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_conf_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_pid_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_sbin_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_share_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_var_dir}
+
+#########################
+#### INSTALL SECTION ####
+#########################
+%install
+mv ${RPM_BUILD_DIR}/%{_final_name}/bin/* ${RPM_BUILD_DIR}%{_bin_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/etc/hadoop/* ${RPM_BUILD_DIR}%{_conf_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/lib/* ${RPM_BUILD_DIR}%{_lib_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/libexec/* ${RPM_BUILD_DIR}%{_libexec_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/sbin/* ${RPM_BUILD_DIR}%{_sbin_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/share/* ${RPM_BUILD_DIR}%{_share_dir}
+rm -rf ${RPM_BUILD_DIR}/%{_final_name}/etc
+
+%pre
+getent group hadoop 2>/dev/null >/dev/null || /usr/sbin/groupadd -r hadoop
+
+%post
+bash ${RPM_INSTALL_PREFIX0}/sbin/update-hadoop-env.sh \
+       --prefix=${RPM_INSTALL_PREFIX0} \
+       --bin-dir=${RPM_INSTALL_PREFIX0}/bin \
+       --sbin-dir=${RPM_INSTALL_PREFIX0}/sbin \
+       --conf-dir=${RPM_INSTALL_PREFIX1} \
+       --log-dir=${RPM_INSTALL_PREFIX2} \
+       --pid-dir=${RPM_INSTALL_PREFIX3}
+
+%preun
+bash ${RPM_INSTALL_PREFIX0}/sbin/update-hadoop-env.sh \
+       --prefix=${RPM_INSTALL_PREFIX0} \
+       --bin-dir=${RPM_INSTALL_PREFIX0}/bin \
+       --sbin-dir=${RPM_INSTALL_PREFIX0}/sbin \
+       --conf-dir=${RPM_INSTALL_PREFIX1} \
+       --log-dir=${RPM_INSTALL_PREFIX2} \
+       --pid-dir=${RPM_INSTALL_PREFIX3} \
+       --uninstall
+
+%files 
+%defattr(-,root,root)
+%attr(0755,root,hadoop) %{_log_dir}
+%attr(0775,root,hadoop) %{_pid_dir}
+%config(noreplace) %{_conf_dir}/configuration.xsl
+%config(noreplace) %{_conf_dir}/core-site.xml
+%config(noreplace) %{_conf_dir}/hadoop-env.sh
+%config(noreplace) %{_conf_dir}/hadoop-metrics.properties
+%config(noreplace) %{_conf_dir}/hadoop-metrics2.properties
+%config(noreplace) %{_conf_dir}/hadoop-policy.xml
+%config(noreplace) %{_conf_dir}/log4j.properties
+%config(noreplace) %{_conf_dir}/masters
+%config(noreplace) %{_conf_dir}/slaves
+%{_conf_dir}/hadoop-env.sh.template
+%{_conf_dir}/ssl-client.xml.example
+%{_conf_dir}/ssl-server.xml.example
+%{_prefix}
+
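
The four Prefix: tags make the package relocatable, and %post forwards whatever the installer chose to update-hadoop-env.sh via RPM_INSTALL_PREFIX0..3. A hedged sketch of installing with a relocated root (the package file name is illustrative; the old path is whatever @package.prefix@ expanded to at build time):

    rpm -ivh --relocate <build-time-prefix>=/opt/hadoop hadoop-common-<version>.<arch>.rpm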

Added: hadoop/common/trunk/src/packages/templates/conf/core-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/templates/conf/core-site.xml?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/templates/conf/core-site.xml (added)
+++ hadoop/common/trunk/src/packages/templates/conf/core-site.xml Fri May 27 16:35:02 2011
@@ -0,0 +1,11 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+  <property>
+    <name>fs.default.name</name>
+    <value>${HADOOP_NN_HOST}</value>
+  </property>
+</configuration>
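
This template pairs with template_generator: ${HADOOP_NN_HOST} is substituted when hadoop-setup-conf.sh generates the file. With the single-node defaults, the generated property would read (illustrative output):

    <property>
      <name>fs.default.name</name>
      <value>hdfs://localhost:9000/</value>
    </property>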

Added: hadoop/common/trunk/src/packages/update-hadoop-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/update-hadoop-env.sh?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/update-hadoop-env.sh (added)
+++ hadoop/common/trunk/src/packages/update-hadoop-env.sh Fri May 27 16:35:02 2011
@@ -0,0 +1,168 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script configures hadoop-env.sh and symlinks directories to
+# support relocated RPM install locations.
+
+usage() {
+  echo "
+usage: $0 <parameters>
+  Required parameters:
+     --prefix=PREFIX             path to install into
+
+  Optional parameters:
+     --arch=i386                 OS Architecture
+     --bin-dir=PREFIX/bin        Executable directory
+     --conf-dir=/etc/hadoop      Configuration directory
+     --lib-dir=PREFIX/lib        Library directory
+     --log-dir=/var/log/hadoop   Log directory
+     --pid-dir=/var/run          PID file location
+     --sbin-dir=PREFIX/sbin      System executable directory
+     --uninstall                 Remove symlinks and generated configuration
+  "
+  exit 1
+}
+
+template_generator() {
+  REGEX='(\$\{[a-zA-Z_][a-zA-Z_0-9]*\})'
+  # Substitute each ${VAR} placeholder with its value from the environment.
+  while IFS= read -r line ; do
+    while [[ "$line" =~ $REGEX ]] ; do
+      LHS=${BASH_REMATCH[1]}
+      RHS="$(eval echo "\"$LHS\"")"
+      line=${line//$LHS/$RHS}
+    done
+    echo "$line" >> "$2"
+  done < "$1"
+}
+
+OPTS=$(getopt \
+  -n $0 \
+  -o '' \
+  -l 'arch:' \
+  -l 'prefix:' \
+  -l 'bin-dir:' \
+  -l 'conf-dir:' \
+  -l 'lib-dir:' \
+  -l 'log-dir:' \
+  -l 'pid-dir:' \
+  -l 'sbin-dir:' \
+  -l 'uninstall' \
+  -- "$@")
+
+if [ $? != 0 ] ; then
+    usage
+fi
+
+eval set -- "${OPTS}"
+while true ; do
+  case "$1" in
+    --arch)
+      ARCH=$2 ; shift 2
+      ;;
+    --prefix)
+      PREFIX=$2 ; shift 2
+      ;;
+    --bin-dir)
+      BIN_DIR=$2 ; shift 2
+      ;;
+    --log-dir)
+      LOG_DIR=$2 ; shift 2
+      ;;
+    --lib-dir)
+      LIB_DIR=$2 ; shift 2
+      ;;
+    --conf-dir)
+      CONF_DIR=$2 ; shift 2
+      ;;
+    --pid-dir)
+      PID_DIR=$2 ; shift 2
+      ;;
+    --sbin-dir)
+      SBIN_DIR=$2 ; shift 2
+      ;;
+    --uninstall)
+      UNINSTALL=1; shift
+      ;;
+    --)
+      shift ; break
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      exit 1
+      ;;
+  esac
+done
+
+for var in PREFIX; do
+  if [ -z "$(eval "echo \$$var")" ]; then
+    echo Missing param: $var
+    usage
+  fi
+done
+
+ARCH=${ARCH:-i386}
+HADOOP_PREFIX=$PREFIX
+HADOOP_BIN_DIR=${BIN_DIR:-$PREFIX/bin}
+HADOOP_CONF_DIR=${CONF_DIR:-$PREFIX/etc/hadoop}
+HADOOP_LIB_DIR=${LIB_DIR:-$PREFIX/lib}
+HADOOP_LOG_DIR=${LOG_DIR:-$PREFIX/var/log}
+HADOOP_PID_DIR=${PID_DIR:-$PREFIX/var/run}
+HADOOP_SBIN_DIR=${SBIN_DIR:-$PREFIX/sbin}
+UNINSTALL=${UNINSTALL:-0}
+
+if [ "${ARCH}" != "i386" ]; then
+  HADOOP_LIB_DIR=${HADOOP_LIB_DIR}64
+fi
+
+if [ "${UNINSTALL}" -eq "1" ]; then
+  # Remove symlinks
+  if [ "${HADOOP_CONF_DIR}" != "${HADOOP_PREFIX}/etc/hadoop" ]; then
+    rm -rf ${HADOOP_PREFIX}/etc/hadoop
+  fi
+  rm -f /etc/default/hadoop-env.sh
+  rm -f /etc/profile.d/hadoop-env.sh
+else
+  # Create symlinks
+  if [ "${HADOOP_CONF_DIR}" != "${HADOOP_PREFIX}/etc/hadoop" ]; then
+    mkdir -p ${HADOOP_PREFIX}/etc
+    ln -sf ${HADOOP_CONF_DIR} ${HADOOP_PREFIX}/etc/hadoop
+  fi
+  ln -sf ${HADOOP_CONF_DIR}/hadoop-env.sh /etc/default/hadoop-env.sh
+  ln -sf ${HADOOP_CONF_DIR}/hadoop-env.sh /etc/profile.d/hadoop-env.sh
+
+  mkdir -p ${HADOOP_LOG_DIR}
+  chown root:hadoop ${HADOOP_LOG_DIR}
+  chmod 775 ${HADOOP_LOG_DIR}
+
+  if [ ! -d ${HADOOP_PID_DIR} ]; then
+    mkdir -p ${HADOOP_PID_DIR}
+    chown root:hadoop ${HADOOP_PID_DIR}
+    chmod 775 ${HADOOP_PID_DIR}
+  fi
+
+  TFILE=$(mktemp "/tmp/$(basename $0).XXXXXX")
+  if [ -z "${JAVA_HOME}" ]; then
+    if [ -e /etc/debian_version ]; then
+      JAVA_HOME=$(update-alternatives --config java | grep java | cut -f2 -d':' | cut -f2 -d' ' | sed -e 's/\/bin\/java//')
+    else
+      JAVA_HOME=/usr/java/default
+    fi
+  fi
+  template_generator ${HADOOP_CONF_DIR}/hadoop-env.sh.template $TFILE
+  cp ${TFILE} ${HADOOP_CONF_DIR}/hadoop-env.sh
+  rm -f ${TFILE}
+fi
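
For reference, the %post scriptlet in the RPM spec above drives this script
roughly as follows; the RPM_INSTALL_PREFIX* variables supply the real
values at install time, so every path here is illustrative:

  bash /usr/sbin/update-hadoop-env.sh \
      --prefix=/usr \
      --bin-dir=/usr/bin \
      --sbin-dir=/usr/sbin \
      --conf-dir=/etc/hadoop \
      --log-dir=/var/log/hadoop \
      --pid-dir=/var/run/hadoop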

Modified: hadoop/common/trunk/src/test/system/c++/runAs/configure
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/system/c%2B%2B/runAs/configure?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/system/c++/runAs/configure (original)
+++ hadoop/common/trunk/src/test/system/c++/runAs/configure Fri May 27 16:35:02 2011
@@ -3349,11 +3349,11 @@ fi
 done
 
 
-#check for HADOOP_HOME
+#check for HADOOP_PREFIX
 if test "$with_home" != ""
 then
 cat >>confdefs.h <<_ACEOF
-#define HADOOP_HOME "$with_home"
+#define HADOOP_PREFIX "$with_home"
 _ACEOF
 
 fi

Modified: hadoop/common/trunk/src/test/system/c++/runAs/configure.ac
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/system/c%2B%2B/runAs/configure.ac?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/system/c++/runAs/configure.ac (original)
+++ hadoop/common/trunk/src/test/system/c++/runAs/configure.ac Fri May 27 16:35:02 2011
@@ -40,10 +40,10 @@ AC_PROG_CC
 AC_HEADER_STDC
 AC_CHECK_HEADERS([stdlib.h string.h unistd.h fcntl.h])
 
-#check for HADOOP_HOME
+#check for HADOOP_PREFIX
 if test "$with_home" != ""
 then
-AC_DEFINE_UNQUOTED(HADOOP_HOME,"$with_home")
+AC_DEFINE_UNQUOTED(HADOOP_PREFIX,"$with_home")
 fi
 
 # Checks for typedefs, structures, and compiler characteristics.
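
Because configure is generated from configure.ac, the HADOOP_HOME to
HADOOP_PREFIX rename has to land in both files (or configure has to be
regenerated with autoconf). Assuming the runAs build accepts a --with-home
flag, as the $with_home test above suggests, defining HADOOP_PREFIX at
build time might look like this (the install path is illustrative):

  autoconf                                 # regenerate configure from configure.ac
  ./configure --with-home=/usr/lib/hadoop  # defines HADOOP_PREFIX in the config header
  make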

Modified: hadoop/common/trunk/src/test/system/c++/runAs/runAs.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/system/c%2B%2B/runAs/runAs.c?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/system/c++/runAs/runAs.c (original)
+++ hadoop/common/trunk/src/test/system/c++/runAs/runAs.c Fri May 27 16:35:02 2011
@@ -86,7 +86,7 @@ int process_cluster_command(char * user,
   }
   len = STRLEN + strlen(command);
   finalcommandstr = (char *) malloc((len + 1) * sizeof(char));
-  snprintf(finalcommandstr, len, SCRIPT_DIR_PATTERN, HADOOP_HOME,
+  snprintf(finalcommandstr, len, SCRIPT_DIR_PATTERN, HADOOP_PREFIX,
       command);
   finalcommandstr[len] = '\0';
   errorcode = switchuser(user);
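
The snprintf above expands SCRIPT_DIR_PATTERN with the configured
HADOOP_PREFIX and the requested command, so runAs ultimately executes, as
the switched user, a command of this shape (both values are illustrative):

  /usr/lib/hadoop/bin/hadoop-daemon.sh start datanode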

Modified: hadoop/common/trunk/src/test/system/c++/runAs/runAs.h.in
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/system/c%2B%2B/runAs/runAs.h.in?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/system/c++/runAs/runAs.h.in (original)
+++ hadoop/common/trunk/src/test/system/c++/runAs/runAs.h.in Fri May 27 16:35:02 2011
@@ -39,13 +39,13 @@ enum errorcodes {
   INVALID_COMMAND_PASSED, //6
 };
 
-#undef HADOOP_HOME
+#undef HADOOP_PREFIX
 
 #define SSH_COMMAND "ssh"
 
 #define SCRIPT_DIR_PATTERN "%s/bin/hadoop-daemon.sh %s" //%s to be substituted
 
-#define STRLEN strlen(SCRIPT_DIR_PATTERN) + strlen(HADOOP_HOME)
+#define STRLEN strlen(SCRIPT_DIR_PATTERN) + strlen(HADOOP_PREFIX)
 
 /*
  * Function to get the user details populated given a user name. 

Modified: hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java (original)
+++ hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java Fri May 27 16:35:02 2011
@@ -59,7 +59,7 @@ public abstract class HadoopDaemonRemote
   public static final String CONF_HADOOPNEWCONFDIR =
     "test.system.hdrc.hadoopnewconfdir";
   /**
-   * Key used to configure the HADOOP_HOME to be used by the
+   * Key used to configure the HADOOP_PREFIX to be used by the
    * HadoopDaemonRemoteCluster.
    */
   public final static String CONF_HADOOPHOME =
@@ -188,7 +188,7 @@ public abstract class HadoopDaemonRemote
     if (hadoopHome == null || hadoopConfDir == null || hadoopHome.isEmpty()
         || hadoopConfDir.isEmpty()) {
       LOG.error("No configuration "
-          + "for the HADOOP_HOME and HADOOP_CONF_DIR passed");
+          + "for the HADOOP_PREFIX and HADOOP_CONF_DIR passed");
       throw new IllegalArgumentException(
           "No Configuration passed for hadoop home " +
           "and hadoop conf directories");


