hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From omal...@apache.org
Subject svn commit: r1128390 [2/2] - in /hadoop/common/branches/branch-0.20-security-204: ./ bin/ conf/ ivy/ src/docs/src/documentation/content/xdocs/ src/packages/ src/packages/deb/ src/packages/deb/hadoop.control/ src/packages/deb/init.d/ src/packages/rpm/ s...
Date Fri, 27 May 2011 16:49:57 GMT
Added: hadoop/common/branches/branch-0.20-security-204/src/packages/hadoop-setup-hdfs.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/hadoop-setup-hdfs.sh?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/hadoop-setup-hdfs.sh (added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/hadoop-setup-hdfs.sh Fri
May 27 16:49:55 2011
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+if [ "$HADOOP_HOME" != "" ]; then
+  echo "Warning: \$HADOOP_HOME is deprecated."
+  echo
+fi
+
+. "$bin"/../libexec/hadoop-config.sh
+
+echo "Setup Hadoop Distributed File System"
+echo
+echo "Formatting namenode"
+echo
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} namenode -format' hdfs
+echo
+echo "Starting namenode process"
+echo
+/etc/init.d/hadoop-namenode start
+echo
+echo "Initialize HDFS file system: "
+echo
+
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chown mapred:mapred /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /tmp' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chmod 777 /tmp' hdfs
+
+if [ $? -eq 0 ]; then
+  echo "Completed."
+else
+  echo "Unknown error occurred, check hadoop logs for details."
+fi
+
+echo
+echo "Please startup datanode processes: /etc/init.d/hadoop-datanode start"

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/hadoop-setup-single-node.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/hadoop-setup-single-node.sh?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/hadoop-setup-single-node.sh
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/hadoop-setup-single-node.sh
Fri May 27 16:49:55 2011
@@ -0,0 +1,215 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Script for setup HDFS file system for single node deployment
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+if [ "$HADOOP_HOME" != "" ]; then
+  echo "Warning: \$HADOOP_HOME is deprecated."
+  echo
+fi
+
+. "$bin"/../libexec/hadoop-config.sh
+
+usage() {
+  echo "
+usage: $0 <parameters>
+
+  Optional parameters:
+     --default                   Setup system as default
+     -h                          Display this message
+  "
+  exit 1
+}
+
+template_generator() {
+  REGEX='(\$\{[a-zA-Z_][a-zA-Z_0-9]*\})'
+  cat $1 |
+  while read line ; do
+    while [[ "$line" =~ $REGEX ]] ; do
+      LHS=${BASH_REMATCH[1]}
+      RHS="$(eval echo "\"$LHS\"")"
+      line=${line//$LHS/$RHS}
+    done
+    echo $line >> $2
+  done
+}
+
+OPTS=$(getopt \
+  -n $0 \
+  -o '' \
+  -l 'default' \
+  -- "$@")
+
+if [ $? != 0 ] ; then
+    usage
+fi
+
+if [ -e /etc/hadoop/hadoop-env.sh ]; then
+  . /etc/hadoop/hadoop-env.sh
+fi
+
+eval set -- "${OPTS}"
+while true ; do
+  case "$1" in
+    --default)
+      AUTOMATED=1; shift
+      ;;
+    -h)
+      usage
+      ;;
+    --)
+      shift ; break
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      exit 1
+      ;;
+  esac
+done
+
+if [ "${AUTOMATED}" != "1" ]; then
+  echo "Welcome to Hadoop single node setup wizard"
+  echo
+  echo -n "Would you like to use default single node configuration? (y/n) "
+  read SET_CONFIG
+  echo -n "Would you like to format name node? (y/n) "
+  read SET_FORMAT
+  echo -n "Would you like to setup default directory structure? (y/n) "
+  read SET_MKDIR
+  echo -n "Would you like to start up Hadoop? (y/n) "
+  read STARTUP
+  echo -n "Would you like to start up Hadoop on reboot? (y/n) "
+  read SET_REBOOT
+  echo
+  echo "Review your choices:"
+  echo
+  echo "Setup single node configuration    : ${SET_CONFIG}"
+  echo "Format namenode                    : ${SET_FORMAT}"
+  echo "Setup default file system structure: ${SET_MKDIR}"
+  echo "Start up Hadoop                    : ${STARTUP}"
+  echo "Start up Hadoop on reboot          : ${SET_REBOOT}"
+  echo
+  echo -n "Proceed with setup? (y/n) "
+  read CONFIRM
+  if [ "${CONFIRM}" != "y" ]; then
+    echo "User aborted setup, exiting..."
+    exit 1
+  fi
+else
+  SET_CONFIG="y"
+  SET_FORMAT="y"
+  SET_MKDIR="y"
+  STARTUP="y"
+  SET_REBOOT="y"
+fi
+
+AUTOMATED=${AUTOMATED:-0}
+SET_CONFIG=${SET_CONFIG:-y}
+SET_FORMAT=${SET_FORMAT:-n}
+SET_MKDIR=${SET_MKDIR:-y}
+STARTUP=${STARTUP:-y}
+SET_REBOOT=${SET_REBOOT:-y}
+
+# Make sure system is not already started
+/etc/init.d/hadoop-namenode stop 2>/dev/null >/dev/null
+/etc/init.d/hadoop-datanode stop 2>/dev/null >/dev/null
+/etc/init.d/hadoop-jobtracker stop 2>/dev/null >/dev/null
+/etc/init.d/hadoop-tasktracker stop 2>/dev/null >/dev/null
+
+if [ "${SET_CONFIG}" == "y" ]; then
+  JAVA_HOME=${JAVA_HOME:-/usr/java/default}
+  HADOOP_NN_HOST=${HADOOP_NN_HOST:-hdfs://localhost:9000/}
+  HADOOP_NN_DIR=${HADOOP_NN_DIR:-/var/lib/hadoop/hdfs/namenode}
+  HADOOP_DN_DIR=${HADOOP_DN_DIR:-/var/lib/hadoop/hdfs/datanode}
+  HADOOP_JT_HOST=${HADOOP_JT_HOST:-localhost:9001}
+  HADOOP_HDFS_DIR=${HADOOP_HDFS_DIR:-/var/lib/hadoop/hdfs}
+  HADOOP_MAPRED_DIR=${HADOOP_MAPRED_DIR:-/var/lib/hadoop/mapred}
+  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/var/run/hadoop}
+  HADOOP_LOG_DIR="/var/log/hadoop"
+  HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop}
+  HADOOP_REPLICATION=${HADOOP_REPLICATION:-1}
+  ${HADOOP_PREFIX}/sbin/hadoop-setup-conf.sh --auto \
+    --conf-dir=${HADOOP_CONF_DIR} \
+    --datanode-dir=${HADOOP_DN_DIR} \
+    --hdfs-dir=${HADOOP_HDFS_DIR} \
+    --jobtracker-url=${HADOOP_JT_HOST} \
+    --log-dir=${HADOOP_LOG_DIR} \
+    --pid-dir=${HADOOP_PID_DIR} \
+    --mapred-dir=${HADOOP_MAPRED_DIR} \
+    --namenode-dir=${HADOOP_NN_DIR} \
+    --namenode-url=${HADOOP_NN_HOST} \
+    --replication=${HADOOP_REPLICATION}
+fi
+
+if [ ! -e ${HADOOP_NN_DIR} ]; then
+  rm -rf ${HADOOP_HDFS_DIR} 2>/dev/null >/dev/null
+  mkdir -p ${HADOOP_HDFS_DIR}
+  chmod 755 ${HADOOP_HDFS_DIR}
+  chown hdfs:hadoop ${HADOOP_HDFS_DIR}
+  /etc/init.d/hadoop-namenode format
+elif [ "${SET_FORMAT}" == "y" ]; then
+  rm -rf ${HADOOP_HDFS_DIR} 2>/dev/null >/dev/null
+  mkdir -p ${HADOOP_HDFS_DIR}
+  chmod 755 ${HADOOP_HDFS_DIR}
+  chown hdfs:hadoop ${HADOOP_HDFS_DIR}
+  rm -rf ${HADOOP_NN_DIR}
+  /etc/init.d/hadoop-namenode format
+fi
+
+/etc/init.d/hadoop-namenode start
+/etc/init.d/hadoop-datanode start
+
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chown mapred:mapred /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /tmp' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chmod 777 /tmp' hdfs
+
+/etc/init.d/hadoop-jobtracker start
+/etc/init.d/hadoop-tasktracker start
+
+if [ "${SET_REBOOT}" == "y" ]; then
+  if [ -e /etc/debian_version ]; then
+    ln -sf ../init.d/hadoop-namenode /etc/rc2.d/S90hadoop-namenode
+    ln -sf ../init.d/hadoop-datanode /etc/rc2.d/S91hadoop-datanode
+    ln -sf ../init.d/hadoop-jobtracker /etc/rc2.d/S92hadoop-jobtracker
+    ln -sf ../init.d/hadoop-tasktracker /etc/rc2.d/S93hadoop-tasktracker
+    ln -sf ../init.d/hadoop-namenode /etc/rc6.d/S10hadoop-namenode
+    ln -sf ../init.d/hadoop-datanode /etc/rc6.d/S11hadoop-datanode
+    ln -sf ../init.d/hadoop-jobtracker /etc/rc6.d/S12hadoop-jobtracker
+    ln -sf ../init.d/hadoop-tasktracker /etc/rc6.d/S13hadoop-tasktracker
+  elif [ -e /etc/redhat-release ]; then
+    /sbin/chkconfig hadoop-namenode --add
+    /sbin/chkconfig hadoop-datanode --add
+    /sbin/chkconfig hadoop-jobtracker --add
+    /sbin/chkconfig hadoop-tasktracker --add
+    /sbin/chkconfig hadoop-namenode on
+    /sbin/chkconfig hadoop-datanode on
+    /sbin/chkconfig hadoop-jobtracker on
+    /sbin/chkconfig hadoop-tasktracker on
+  fi
+fi
+
+if [ "${STARTUP}" != "y" ]; then
+  /etc/init.d/hadoop-namenode stop
+  /etc/init.d/hadoop-datanode stop
+  /etc/init.d/hadoop-jobtracker stop
+  /etc/init.d/hadoop-tasktracker stop
+fi

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-datanode
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-datanode?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-datanode
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-datanode
Fri May 27 16:49:55 2011
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop datanode
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop datanode
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid"
+desc="Hadoop datanode daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-datanode): "
+  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start datanode
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-datanode
+  return $RETVAL
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-datanode): "
+  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop datanode
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-datanode $PIDFILE
+}
+
+restart() {
+  stop
+  start
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-datanode ] && restart || :
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|condrestart}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-jobtracker
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-jobtracker?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-jobtracker
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-jobtracker
Fri May 27 16:49:55 2011
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop jobtracker
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop jobtracker
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid"
+desc="Hadoop jobtracker daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-jobtracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start jobtracker
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-jobtracker
+  return $RETVAL
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-jobtracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop jobtracker
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-jobtracker $PIDFILE
+}
+
+restart() {
+  stop
+  start
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-jobtracker ] && restart || :
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|condrestart}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-namenode
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-namenode?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-namenode
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-namenode
Fri May 27 16:49:55 2011
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop namenode
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop namenode
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid"
+desc="Hadoop namenode daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-namenode): "
+  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start namenode $1
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-namenode
+  return $RETVAL
+}
+
+upgrade() {
+  start -upgrade
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-namenode): "
+  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop namenode
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-namenode $PIDFILE
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+restart() {
+  stop
+  start
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-namenode ] && restart || :
+}
+
+format() {
+  daemon --user hdfs ${HADOOP_PREFIX}/bin/hadoop namenode -format
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  upgrade)
+    upgrade
+    ;;
+  format)
+    format
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart|try-restart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|try-restart|upgrade}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-tasktracker
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-tasktracker?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-tasktracker
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/init.d/hadoop-tasktracker
Fri May 27 16:49:55 2011
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop tasktracker
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop tasktracker
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid"
+desc="Hadoop tasktracker daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-tasktracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start tasktracker
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-tasktracker
+  return $RETVAL
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-tasktracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop tasktracker
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-tasktracker $PIDFILE
+}
+
+restart() {
+  stop
+  start
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-tasktracker ] && restart || :
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|condrestart}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/spec/hadoop.spec
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/spec/hadoop.spec?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/spec/hadoop.spec (added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/rpm/spec/hadoop.spec Fri
May 27 16:49:55 2011
@@ -0,0 +1,194 @@
+#   Licensed to the Apache Software Foundation (ASF) under one or more
+#   contributor license agreements.  See the NOTICE file distributed with
+#   this work for additional information regarding copyright ownership.
+#   The ASF licenses this file to You under the Apache License, Version 2.0
+#   (the "License"); you may not use this file except in compliance with
+#   the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+#
+# RPM Spec file for Hadoop version @version@
+#
+
+%define name         hadoop
+%define version      @version@
+%define release      @package.release@
+
+# Installation Locations
+%define _prefix      @package.prefix@
+%define _bin_dir     %{_prefix}/bin
+%define _conf_dir    @package.conf.dir@
+%define _include_dir %{_prefix}/include
+%define _lib_dir     %{_prefix}/lib
+%define _lib64_dir   %{_prefix}/lib64
+%define _libexec_dir %{_prefix}/libexec
+%define _log_dir     @package.log.dir@
+%define _man_dir     %{_prefix}/man
+%define _pid_dir     @package.pid.dir@
+%define _sbin_dir    %{_prefix}/sbin
+%define _share_dir   %{_prefix}/share
+%define _var_dir     /var/lib/hadoop
+
+# Build time settings
+%define _build_dir  @package.build.dir@
+%define _final_name @final.name@
+%define debug_package %{nil}
+
+# Disable brp-java-repack-jars for aspect J
+%define __os_install_post    \
+    /usr/lib/rpm/redhat/brp-compress \
+    %{!?__debug_package:/usr/lib/rpm/redhat/brp-strip %{__strip}} \
+    /usr/lib/rpm/redhat/brp-strip-static-archive %{__strip} \
+    /usr/lib/rpm/redhat/brp-strip-comment-note %{__strip} %{__objdump} \
+    /usr/lib/rpm/brp-python-bytecompile %{nil}
+
+# RPM searches perl files for dependancies and this breaks for non packaged perl lib
+# like thrift so disable this
+%define _use_internal_dependency_generator 0
+
+%ifarch i386
+%global hadoop_arch Linux-i386-32
+%endif
+%ifarch amd64 x86_64
+%global hadoop_arch Linux-amd64-64
+%endif
+%ifarch noarch
+%global hadoop_arch ""
+%endif
+
+Summary: The Apache Hadoop project develops open-source software for reliable, scalable,
distributed computing
+License: Apache License, Version 2.0
+URL: http://hadoop.apache.org/core/
+Vendor: Apache Software Foundation
+Group: Development/Libraries
+Name: %{name}
+Version: %{version}
+Release: %{release} 
+Source0: %{_final_name}-bin.tar.gz
+Source1: %{_final_name}-script.tar.gz
+Prefix: %{_prefix}
+Prefix: %{_conf_dir}
+Prefix: %{_log_dir}
+Prefix: %{_pid_dir}
+Buildroot: %{_build_dir}
+Requires: sh-utils, textutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig, /sbin/service,
jdk >= 1.6
+AutoReqProv: no
+Provides: hadoop
+
+%description
+The Apache Hadoop project develops open-source software for reliable, scalable, 
+distributed computing.  Hadoop includes these subprojects:
+
+Hadoop Common: The common utilities that support the other Hadoop subprojects.
+HDFS: A distributed file system that provides high throughput access to application data.
+MapReduce: A software framework for distributed processing of large data sets on compute
clusters.
+
+%prep
+%setup -n %{_final_name} -a 0
+%setup -n %{_final_name} -a 1
+
+%build
+if [ -d ${RPM_BUILD_DIR}%{_prefix} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_prefix}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_log_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_log_dir}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_conf_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_conf_dir}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_pid_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_pid_dir}
+fi
+
+mkdir -p ${RPM_BUILD_DIR}%{_prefix}
+mkdir -p ${RPM_BUILD_DIR}%{_bin_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_include_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_lib_dir}
+%ifarch amd64 x86_64
+mkdir -p ${RPM_BUILD_DIR}%{_lib64_dir}
+%endif
+mkdir -p ${RPM_BUILD_DIR}%{_libexec_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_log_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_conf_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_man_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_pid_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_sbin_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_share_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_var_dir}
+mkdir -p ${RPM_BUILD_DIR}/etc/rc.d/init.d
+
+mv ${RPM_BUILD_DIR}/%{_final_name}/hadoop-namenode ${RPM_BUILD_DIR}/etc/rc.d/init.d/hadoop-namenode
+mv ${RPM_BUILD_DIR}/%{_final_name}/hadoop-datanode ${RPM_BUILD_DIR}/etc/rc.d/init.d/hadoop-datanode
+mv ${RPM_BUILD_DIR}/%{_final_name}/hadoop-jobtracker ${RPM_BUILD_DIR}/etc/rc.d/init.d/hadoop-jobtracker
+mv ${RPM_BUILD_DIR}/%{_final_name}/hadoop-tasktracker ${RPM_BUILD_DIR}/etc/rc.d/init.d/hadoop-tasktracker
+chmod 0755 ${RPM_BUILD_DIR}/etc/rc.d/init.d/*
+chmod 0755 ${RPM_BUILD_DIR}/%{_final_name}/sbin/hadoop-*
+
+#########################
+#### INSTALL SECTION ####
+#########################
+%install
+mv ${RPM_BUILD_DIR}/%{_final_name}/etc/hadoop/* ${RPM_BUILD_DIR}%{_conf_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/* ${RPM_BUILD_DIR}%{_prefix}
+
+if [ "${RPM_BUILD_DIR}%{_conf_dir}" != "${RPM_BUILD_DIR}/%{_prefix}/conf" ]; then
+  rm -rf ${RPM_BUILD_DIR}/%{_prefix}/etc
+fi
+
+%pre
+getent group hadoop 2>/dev/null >/dev/null || /usr/sbin/groupadd -r hadoop
+
+/usr/sbin/useradd --comment "Hadoop MapReduce" --shell /bin/bash -M -r --groups hadoop --home /tmp mapred 2> /dev/null || :
+/usr/sbin/useradd --comment "Hadoop HDFS" --shell /bin/bash -M -r --groups hadoop --home /tmp hdfs 2> /dev/null || :
+
+%post
+bash ${RPM_INSTALL_PREFIX0}/sbin/update-hadoop-env.sh \
+       --prefix=${RPM_INSTALL_PREFIX0} \
+       --bin-dir=${RPM_INSTALL_PREFIX0}/bin \
+       --sbin-dir=${RPM_INSTALL_PREFIX0}/sbin \
+       --conf-dir=${RPM_INSTALL_PREFIX1} \
+       --log-dir=${RPM_INSTALL_PREFIX2} \
+       --pid-dir=${RPM_INSTALL_PREFIX3}
+
+%preun
+bash ${RPM_INSTALL_PREFIX0}/sbin/update-hadoop-env.sh \
+       --prefix=${RPM_INSTALL_PREFIX0} \
+       --bin-dir=${RPM_INSTALL_PREFIX0}/bin \
+       --sbin-dir=${RPM_INSTALL_PREFIX0}/sbin \
+       --conf-dir=${RPM_INSTALL_PREFIX1} \
+       --log-dir=${RPM_INSTALL_PREFIX2} \
+       --pid-dir=${RPM_INSTALL_PREFIX3} \
+       --uninstall
+
+%files 
+%defattr(-,root,root)
+%attr(0755,root,hadoop) %{_log_dir}
+%attr(0775,root,hadoop) %{_pid_dir}
+%config(noreplace) %{_conf_dir}/capacity-scheduler.xml
+%config(noreplace) %{_conf_dir}/configuration.xsl
+%config(noreplace) %{_conf_dir}/core-site.xml
+%config(noreplace) %{_conf_dir}/hadoop-env.sh
+%config(noreplace) %{_conf_dir}/hadoop-metrics2.properties
+%config(noreplace) %{_conf_dir}/hadoop-policy.xml
+%config(noreplace) %{_conf_dir}/hdfs-site.xml
+%config(noreplace) %{_conf_dir}/log4j.properties
+%config(noreplace) %{_conf_dir}/mapred-queue-acls.xml
+%config(noreplace) %{_conf_dir}/mapred-site.xml
+%config(noreplace) %{_conf_dir}/masters
+%config(noreplace) %{_conf_dir}/slaves
+%config(noreplace) %{_conf_dir}/ssl-client.xml.example
+%config(noreplace) %{_conf_dir}/ssl-server.xml.example
+%config(noreplace) %{_conf_dir}/taskcontroller.cfg
+%{_prefix}
+%attr(0755,root,root) /etc/rc.d/init.d

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/core-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/core-site.xml?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/core-site.xml
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/core-site.xml
Fri May 27 16:49:55 2011
@@ -0,0 +1,11 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+  <property>
+    <name>fs.default.name</name>
+    <value>${HADOOP_NN_HOST}</value>
+  </property>
+</configuration>

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/hadoop-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/hadoop-env.sh?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/hadoop-env.sh
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/hadoop-env.sh
Fri May 27 16:49:55 2011
@@ -0,0 +1,64 @@
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME.  All others are
+# optional.  When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+# The java implementation to use.  Required.
+export JAVA_HOME=${JAVA_HOME}
+
+# Location where Hadoop is installed
+export HADOOP_PREFIX=${HADOOP_PREFIX}
+export HADOOP_HOME=${HADOOP_PREFIX}/share/hadoop
+
+# Extra Java CLASSPATH elements.  Optional.
+# export HADOOP_CLASSPATH=
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HADOOP_HEAPSIZE=2000
+
+# Extra Java runtime options.  Empty by default.
+# export HADOOP_OPTS=-server
+
+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_SECONDARYNAMENODE_OPTS"
+export HADOOP_DATANODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_DATANODE_OPTS"
+export HADOOP_BALANCER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_BALANCER_OPTS"
+export HADOOP_JOBTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_JOBTRACKER_OPTS"
+# export HADOOP_TASKTRACKER_OPTS=
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+# export HADOOP_CLIENT_OPTS
+
+# Extra ssh options.  Empty by default.
+# export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
+
+# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+# host:path where hadoop code should be rsync'd from.  Unset by default.
+# export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+# Seconds to sleep between slave commands.  Unset by default.  This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HADOOP_SLAVE_SLEEP=0.1
+
+# The directory where pid files are stored. /tmp by default.
+HADOOP_PID_DIR=${HADOOP_PID_DIR}
+export HADOOP_PID_DIR=${HADOOP_PID_DIR:-$HADOOP_PREFIX/var/run}
+
+# A string representing this instance of hadoop. $USER by default.
+export HADOOP_IDENT_STRING=`whoami`
+
+# The scheduling priority for daemon processes.  See 'man nice'.
+# export HADOOP_NICENESS=10
+
+# Where log files are stored.  $HADOOP_HOME/logs by default.
+HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$HADOOP_IDENT_STRING
+export HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-$HADOOP_HOME/var/log}
+
+# Hadoop configuration directory
+HADOOP_CONF_DIR=${HADOOP_CONF_DIR}
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$HADOOP_PREFIX/conf}

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/hdfs-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/hdfs-site.xml?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/hdfs-site.xml
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/hdfs-site.xml
Fri May 27 16:49:55 2011
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<!-- Template file: the ${NAME} values below are install-time
+     placeholders expanded by update-hadoop-env.sh's template_generator,
+     which substitutes the like-named shell variables. -->
+<configuration>
+  <!-- Replication factor for HDFS blocks (per the property name). -->
+  <property>
+    <name>dfs.replication</name>
+    <value>${HADOOP_REPLICATION}</value>
+  </property>
+  <!-- NameNode storage directory, filled in at install time. -->
+  <property>
+    <name>dfs.name.dir</name>
+    <value>${HADOOP_NN_DIR}</value>
+  </property>
+  <!-- DataNode storage directory, filled in at install time. -->
+  <property>
+    <name>dfs.data.dir</name>
+    <value>${HADOOP_DN_DIR}</value>
+  </property>
+  <!-- NOTE(review): hadoop.tmp.dir is hard-coded to /tmp, which is
+       typically cleared on reboot — confirm this is acceptable for
+       the installs this template targets. -->
+  <property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp</value>
+  </property>
+</configuration>

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/mapred-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/mapred-site.xml?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/mapred-site.xml
(added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/templates/conf/mapred-site.xml
Fri May 27 16:49:55 2011
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<!-- Template file: the ${NAME} values below are install-time
+     placeholders expanded by update-hadoop-env.sh's template_generator,
+     which substitutes the like-named shell variables. -->
+<configuration>
+  <!-- JobTracker address, filled in at install time. -->
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>${HADOOP_JT_HOST}</value>
+  </property>
+
+  <!-- Shared system directory for the MapReduce framework. -->
+  <property>
+    <name>mapred.system.dir</name>
+    <value>/user/mapred/system</value>
+  </property>
+
+  <!-- Local scratch directory for map/reduce tasks, filled in at
+       install time. -->
+  <property>
+    <name>mapred.local.dir</name>
+    <value>${HADOOP_MAPRED_DIR}</value>
+  </property>
+
+  <!-- NOTE(review): hadoop.tmp.dir is hard-coded to /tmp, which is
+       typically cleared on reboot — confirm this is intended. -->
+  <property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp</value>
+  </property>
+
+  <!-- Task scheduler implementation class, filled in at install
+       time. -->
+  <property>
+    <name>mapred.jobtracker.taskScheduler</name>
+    <value>${HADOOP_TASK_SCHEDULER}</value>
+  </property>
+</configuration>

Added: hadoop/common/branches/branch-0.20-security-204/src/packages/update-hadoop-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/packages/update-hadoop-env.sh?rev=1128390&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/packages/update-hadoop-env.sh (added)
+++ hadoop/common/branches/branch-0.20-security-204/src/packages/update-hadoop-env.sh Fri
May 27 16:49:55 2011
@@ -0,0 +1,178 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script configures hadoop-env.sh and symlinks directories for
+# relocating RPM locations.
+#
+# NOTE: bash (not plain /bin/sh) is required: template_generator below
+# relies on [[ =~ ]], BASH_REMATCH and ${var//pattern/repl}, none of
+# which are POSIX sh features, so the previous "#!/bin/sh" shebang
+# breaks on systems where /bin/sh is dash or another strict POSIX shell.
+
+# Print the command-line help text and exit with status 1.  The body is
+# a single quoted string literal, so its exact text (including the
+# leading and trailing blank lines) is part of the script's output
+# contract and must not be reformatted.
+usage() {
+  echo "
+usage: $0 <parameters>
+  Required parameters:
+     --prefix=PREFIX             path to install into
+
+  Optional parameters:
+     --arch=i386                 OS Architecture
+     --bin-dir=PREFIX/bin        Executable directory
+     --conf-dir=/etc/hadoop      Configuration directory
+     --log-dir=/var/log/hadoop   Log directory
+     --pid-dir=/var/run          PID file location
+     --sbin-dir=PREFIX/sbin      System executable directory
+  "
+  exit 1
+}
+
+template_generator() {
+  REGEX='(\$\{[a-zA-Z_][a-zA-Z_0-9]*\})'
+  cat $1 |
+  while read line ; do
+    while [[ "$line" =~ $REGEX ]] ; do
+      LHS=${BASH_REMATCH[1]}
+      RHS="$(eval echo "\"$LHS\"")"
+      line=${line//$LHS/$RHS}
+    done
+    echo $line >> $2
+  done
+}
+
+# Normalize the long-form command-line options with getopt(1).
+# NOTE(review): -l long options require util-linux "enhanced" getopt;
+# BSD/macOS getopt does not support them — confirm target platforms.
+# $0 is passed unquoted to -n, which is fine only while the script path
+# contains no whitespace.
+OPTS=$(getopt \
+  -n $0 \
+  -o '' \
+  -l 'arch:' \
+  -l 'prefix:' \
+  -l 'bin-dir:' \
+  -l 'conf-dir:' \
+  -l 'lib-dir:' \
+  -l 'log-dir:' \
+  -l 'pid-dir:' \
+  -l 'sbin-dir:' \
+  -l 'uninstall' \
+  -- "$@")
+
+# $? here is getopt's exit status (it survives the assignment above);
+# a non-zero value means the options could not be parsed.
+if [ $? != 0 ] ; then
+    usage
+fi
+
+# Re-set the positional parameters to getopt's normalized output, then
+# consume them: options with an argument shift 2, flags shift 1, and
+# the "--" terminator ends the loop.
+eval set -- "${OPTS}"
+while true ; do
+  case "$1" in
+    --arch)
+      ARCH=$2 ; shift 2
+      ;;
+    --prefix)
+      PREFIX=$2 ; shift 2
+      ;;
+    --bin-dir)
+      BIN_DIR=$2 ; shift 2
+      ;;
+    --log-dir)
+      LOG_DIR=$2 ; shift 2
+      ;;
+    --lib-dir)
+      LIB_DIR=$2 ; shift 2
+      ;;
+    --conf-dir)
+      CONF_DIR=$2 ; shift 2
+      ;;
+    --pid-dir)
+      PID_DIR=$2 ; shift 2
+      ;;
+    --sbin-dir)
+      SBIN_DIR=$2 ; shift 2
+      ;;
+    --uninstall)
+      # Boolean flag: switches the script into teardown mode below.
+      UNINSTALL=1; shift
+      ;;
+    --)
+      shift ; break
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      exit 1
+      ;;
+  esac
+done
+
+# Require the mandatory parameters (currently only PREFIX).  The
+# indirect $(eval "echo \$$var") lookup keeps the loop generic should
+# more required parameters be added to the list later.
+for var in PREFIX; do
+  if [ -z "$(eval "echo \$$var")" ]; then
+    echo Missing param: $var
+    usage
+  fi
+done
+
+# Fill in defaults for anything not supplied on the command line.
+ARCH=${ARCH:-i386}
+BIN_DIR=${BIN_DIR:-$PREFIX/share/hadoop/bin}
+CONF_DIR=${CONF_DIR:-$PREFIX/etc/hadoop}
+LIB_DIR=${LIB_DIR:-$PREFIX/lib}
+LOG_DIR=${LOG_DIR:-$PREFIX/var/log}
+PID_DIR=${PID_DIR:-$PREFIX/var/run}
+SBIN_DIR=${SBIN_DIR:-$PREFIX/share/hadoop/sbin}
+UNINSTALL=${UNINSTALL:-0}
+
+# Non-i386 architectures use the lib64 naming convention, so append
+# "64" to the library directory.
+if [ "${ARCH}" != "i386" ]; then
+  LIB_DIR=${LIB_DIR}64
+fi
+
+# Teardown vs. setup.  NOTE(review): ${PREFIX}, ${CONF_DIR}, ${LOG_DIR}
+# and ${PID_DIR} are expanded unquoted throughout this section, so
+# paths containing whitespace would misbehave (including the rm -rf
+# below) — confirm installs never use such paths.
+if [ "${UNINSTALL}" -eq "1" ]; then
+  # Remove symlinks
+  # ${PREFIX}/etc/hadoop is only removed when the real conf dir lives
+  # elsewhere, i.e. when it is the symlink created by the setup branch
+  # below rather than the actual configuration directory.
+  if [ "${CONF_DIR}" != "${PREFIX}/etc/hadoop" ]; then
+    rm -rf ${PREFIX}/etc/hadoop
+  fi
+  rm -f /etc/default/hadoop-env.sh
+  rm -f /etc/profile.d/hadoop-env.sh
+else
+  # Create symlinks
+  if [ "${CONF_DIR}" != "${PREFIX}/etc/hadoop" ]; then
+    mkdir -p ${PREFIX}/etc
+    ln -sf ${CONF_DIR} ${PREFIX}/etc/hadoop
+  fi
+  # Expose hadoop-env.sh system-wide (init scripts read /etc/default,
+  # login shells source /etc/profile.d).
+  ln -sf ${CONF_DIR}/hadoop-env.sh /etc/default/hadoop-env.sh
+  ln -sf ${CONF_DIR}/hadoop-env.sh /etc/profile.d/hadoop-env.sh
+
+  # Per-service log directories: root:hadoop owns the root, the hdfs
+  # and mapred users own their respective subdirectories.
+  mkdir -p ${LOG_DIR}
+  mkdir -p ${LOG_DIR}/hdfs
+  mkdir -p ${LOG_DIR}/mapred
+  chown root:hadoop ${LOG_DIR}
+  chown hdfs ${LOG_DIR}/hdfs
+  chown mapred ${LOG_DIR}/mapred
+  chmod 755 ${LOG_DIR}
+  chmod 755 ${LOG_DIR}/hdfs
+  chmod 755 ${LOG_DIR}/mapred
+
+  # PID dir is group-writable (775) so daemons running as hadoop-group
+  # members can write their pid files.
+  if [ ! -d ${PID_DIR} ]; then
+    mkdir -p ${PID_DIR}
+    chown root:hadoop ${PID_DIR}
+    chmod 775 ${PID_DIR}
+  fi
+
+  # NOTE(review): predictable temp name in /tmp ($0.$$); mktemp would
+  # be safer against symlink attacks — confirm threat model.
+  TFILE="/tmp/$(basename $0).$$.tmp"
+  # Locate JAVA_HOME when the caller did not export one: Debian via
+  # update-alternatives, otherwise the conventional /usr/java/default.
+  # NOTE(review): the update-alternatives line below appears to have
+  # been wrapped by the mail archiver mid-pipeline — verify against the
+  # committed file.
+  if [ -z "${JAVA_HOME}" ]; then
+    if [ -e /etc/debian_version ]; then
+      JAVA_HOME=`update-alternatives --config java | grep java | cut -f2 -d':' | cut -f2
-d' ' | sed -e 's/\/bin\/java//'`
+    else
+      JAVA_HOME=/usr/java/default
+    fi
+  fi
+  # These variables are read (via eval) by template_generator when it
+  # expands the ${...} placeholders in the hadoop-env.sh template.
+  HADOOP_CONF_DIR=${CONF_DIR}
+  HADOOP_LOG_DIR=${LOG_DIR}
+  HADOOP_PID_DIR=${PID_DIR}
+  HADOOP_PREFIX=${PREFIX}
+  HADOOP_HOME=${PREFIX}/share/hadoop
+  # Render the template into the temp file, install it, clean up.
+  template_generator ${PREFIX}/share/hadoop/templates/conf/hadoop-env.sh $TFILE
+  cp ${TFILE} ${CONF_DIR}/hadoop-env.sh
+  rm -f ${TFILE}
+fi



Mime
View raw message