hadoop-hdfs-commits mailing list archives

From: omal...@apache.org
Subject: svn commit: r1089323 - in /hadoop/hdfs/branches/yahoo-merge: CHANGES.txt bin/hdfs bin/start-dfs.sh bin/stop-dfs.sh
Date: Wed, 06 Apr 2011 06:22:54 GMT
Author: omalley
Date: Wed Apr  6 06:22:53 2011
New Revision: 1089323

URL: http://svn.apache.org/viewvc?rev=1089323&view=rev
Log:
commit 71368c14b66af1b29a7a35924eb0bdfe38c37668
Author: Owen O'Malley <omalley@apache.org>
Date:   Tue Apr 5 16:29:07 2011 -0700

    HDFS-1703. Start namenodes and secondary namenodes on hosts returned by hdfs
    getconf (Erik Steffl)
    
    Conflicts:
    
    	YAHOO-CHANGES.txt

Modified:
    hadoop/hdfs/branches/yahoo-merge/CHANGES.txt
    hadoop/hdfs/branches/yahoo-merge/bin/hdfs
    hadoop/hdfs/branches/yahoo-merge/bin/start-dfs.sh
    hadoop/hdfs/branches/yahoo-merge/bin/stop-dfs.sh
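
At a high level, the start/stop scripts now ask "hdfs getconf" which namenode
and secondary namenode hosts are configured, instead of assuming the local
host and the static "masters" file, and hand that list to hadoop-daemons.sh
via --hostnames. A rough sketch of the queries involved (host names and the
exact output layout are illustrative, not taken from this commit):

    $ bin/hdfs getconf -namenodes
    nn1.example.com
    $ bin/hdfs getconf -secondarynamenodes
    snn1.example.com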

Modified: hadoop/hdfs/branches/yahoo-merge/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/yahoo-merge/CHANGES.txt?rev=1089323&r1=1089322&r2=1089323&view=diff
==============================================================================
--- hadoop/hdfs/branches/yahoo-merge/CHANGES.txt (original)
+++ hadoop/hdfs/branches/yahoo-merge/CHANGES.txt Wed Apr  6 06:22:53 2011
@@ -265,6 +265,9 @@ Trunk (unreleased changes)
 
     HDFS-1730. Move DaemonFactory from HDFS to Common (Erik Steffl)
 
+    HDFS-1703. Run the namenodes and secondary namenodes on hosts
+    returned by hdfs getconf (Erik Steffl)
+
 Release 0.22.0 - Unreleased
 
   NEW FEATURES

Modified: hadoop/hdfs/branches/yahoo-merge/bin/hdfs
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/yahoo-merge/bin/hdfs?rev=1089323&r1=1089322&r2=1089323&view=diff
==============================================================================
--- hadoop/hdfs/branches/yahoo-merge/bin/hdfs (original)
+++ hadoop/hdfs/branches/yahoo-merge/bin/hdfs Wed Apr  6 06:22:53 2011
@@ -92,7 +92,7 @@ elif [ "$COMMAND" = "oiv" ] ; then
   CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
 elif [ "$COMMAND" = "fetchdt" ] ; then
   CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
-elif [ "$COMMAND" = "getconfig" ] ; then
+elif [ "$COMMAND" = "getconf" ] ; then
   CLASS=org.apache.hadoop.hdfs.tools.GetConf
 else
   echo $COMMAND - invalid command
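
With the rename above, the subcommand is invoked as "hdfs getconf" (the old
"getconfig" spelling is no longer dispatched) and maps to the GetConf tool
class. A minimal sketch of the dispatch, assuming the usual behaviour of the
hdfs launcher:

    # hypothetical invocation; -namenodes/-secondarynamenodes are the flags
    # the start/stop scripts below rely on
    $ bin/hdfs getconf -namenodes
    # effectively runs: java ... org.apache.hadoop.hdfs.tools.GetConf -namenodes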

Modified: hadoop/hdfs/branches/yahoo-merge/bin/start-dfs.sh
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/yahoo-merge/bin/start-dfs.sh?rev=1089323&r1=1089322&r2=1089323&view=diff
==============================================================================
--- hadoop/hdfs/branches/yahoo-merge/bin/start-dfs.sh (original)
+++ hadoop/hdfs/branches/yahoo-merge/bin/start-dfs.sh Wed Apr  6 06:22:53 2011
@@ -25,17 +25,17 @@ usage="Usage: start-dfs.sh [-upgrade|-ro
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. "$bin"/hdfs-config.sh
+. "$bin/hdfs-config.sh"
 
 # get arguments
 if [ $# -ge 1 ]; then
-	nameStartOpt=$1
+	nameStartOpt="$1"
 	shift
-	case $nameStartOpt in
+	case "$nameStartOpt" in
 	  (-upgrade)
 	  	;;
 	  (-rollback) 
-	  	dataStartOpt=$nameStartOpt
+	  	dataStartOpt="$nameStartOpt"
 	  	;;
 	  (*)
 		  echo $usage
@@ -44,14 +44,47 @@ if [ $# -ge 1 ]; then
 	esac
 fi
 
-# start dfs daemons
-# start namenode after datanodes, to minimize time namenode is up w/o data
-# note: datanodes will log connection errors until namenode starts
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs
start namenode $nameStartOpt
-#
+#---------------------------------------------------------
+# namenodes
+
+NAMENODES=$($HADOOP_HOME/bin/hdfs getconf -namenodes)
+
+echo "Starting namenodes on [$NAMENODES]"
+
+"$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
+  --config "$HADOOP_CONF_DIR" \
+  --hostnames "$NAMENODES" \
+  --script "$bin/hdfs" start namenode $nameStartOpt
+
+#---------------------------------------------------------
+# datanodes (using default slaves file)
+
 if [ -n "$HADOOP_SECURE_DN_USER" ]; then
-  echo "Attempting to start secure cluster, skipping datanodes. Run start-secure-dns.sh as
root to complete startup."
+  echo \
+    "Attempting to start secure cluster, skipping datanodes. " \
+    "Run start-secure-dns.sh as root to complete startup."
+else
+  "$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
+    --config "$HADOOP_CONF_DIR" \
+    --script "$bin/hdfs" start datanode $dataStartOpt
+fi
+
+#---------------------------------------------------------
+# secondary namenodes (if any)
+
+SECONDARY_NAMENODES=$($HADOOP_HOME/bin/hdfs getconf -secondarynamenodes)
+
+if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
+  echo \
+    "Secondary namenodes are not configured. " \
+    "Cannot start secondary namenodes."
 else
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs
start datanode $dataStartOpt
+  echo "Starting secondary namenodes [$SECONDARY_NAMENODES]"
+
+  "$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
+    --config "$HADOOP_CONF_DIR" \
+    --hostnames "$SECONDARY_NAMENODES" \
+    --script "$bin/hdfs" start secondarynamenode
 fi
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters --script
"$bin"/hdfs start secondarynamenode
+
+# eof
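
Put together, a run of the reworked start script would look roughly like the
transcript below (host names and the per-daemon log lines are illustrative;
-upgrade is one of the two options accepted by the case statement above):

    $ bin/start-dfs.sh -upgrade
    Starting namenodes on [nn1.example.com]
    nn1.example.com: starting namenode, logging to ...
    dn1.example.com: starting datanode, logging to ...
    Starting secondary namenodes [snn1.example.com]
    snn1.example.com: starting secondarynamenode, logging to ...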

Modified: hadoop/hdfs/branches/yahoo-merge/bin/stop-dfs.sh
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/yahoo-merge/bin/stop-dfs.sh?rev=1089323&r1=1089322&r2=1089323&view=diff
==============================================================================
--- hadoop/hdfs/branches/yahoo-merge/bin/stop-dfs.sh (original)
+++ hadoop/hdfs/branches/yahoo-merge/bin/stop-dfs.sh Wed Apr  6 06:22:53 2011
@@ -15,18 +15,52 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-# Stop hadoop DFS daemons.  Run this on master node.
-
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
 . "$bin"/hdfs-config.sh
 
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs
stop namenode
+#---------------------------------------------------------
+# namenodes
+
+NAMENODES=$($HADOOP_HOME/bin/hdfs getconf -namenodes)
+
+echo "Stopping namenodes on [$NAMENODES]"
+
+"$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
+  --config "$HADOOP_CONF_DIR" \
+  --hostnames "$NAMENODES" \
+  --script "$bin/hdfs" stop namenode
+
+#---------------------------------------------------------
+# datanodes (using default slaves file)
+
 if [ -n "$HADOOP_SECURE_DN_USER" ]; then
-  echo "Attempting to stop secure cluster, skipping datanodes. Run stop-secure-dns.sh as
root to complete shutdown."
+  echo \
+    "Attempting to stop secure cluster, skipping datanodes. " \
+    "Run stop-secure-dns.sh as root to complete shutdown."
 else
-  "$HADOOP_COMMON_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs
stop datanode
+  "$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
+    --config "$HADOOP_CONF_DIR" \
+    --script "$bin/hdfs" stop datanode
 fi
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters --script
"$bin"/hdfs stop secondarynamenode
\ No newline at end of file
+
+#---------------------------------------------------------
+# secondary namenodes (if any)
+
+SECONDARY_NAMENODES=$($HADOOP_HOME/bin/hdfs getconf -secondarynamenodes)
+
+if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
+  echo \
+    "Secondary namenodes are not configured. " \
+    "Cannot stop secondary namenodes."
+else
+  echo "Stopping secondary namenodes [$SECONDARY_NAMENODES]"
+
+  "$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
+    --config "$HADOOP_CONF_DIR" \
+    --hostnames "$SECONDARY_NAMENODES" \
+    --script "$bin/hdfs" stop secondarynamenode
+fi
+
+# eof
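
stop-dfs.sh mirrors the start script. One detail worth illustrating is the
0.0.0.0 check: when no secondary namenode address is configured, getconf
returns the default wildcard bind address and the script skips that step.
A sketch of what an operator might see in that case (hosts and per-daemon
lines are illustrative assumptions, not output captured from this commit):

    $ bin/hdfs getconf -secondarynamenodes
    0.0.0.0
    $ bin/stop-dfs.sh
    Stopping namenodes on [nn1.example.com]
    nn1.example.com: stopping namenode
    dn1.example.com: stopping datanode
    Secondary namenodes are not configured.  Cannot stop secondary namenodes.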


