hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From c..@apache.org
Subject svn commit: r882733 - in /hadoop/hdfs/trunk: CHANGES.txt src/c++/libhdfs/tests/test-libhdfs.sh
Date Fri, 20 Nov 2009 22:06:42 GMT
Author: cos
Date: Fri Nov 20 22:06:41 2009
New Revision: 882733

URL: http://svn.apache.org/viewvc?rev=882733&view=rev
Log:
HDFS-756. libhdfs unit tests do not run. (Eli Collins via cos).

Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=882733&r1=882732&r2=882733&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Fri Nov 20 22:06:41 2009
@@ -68,6 +68,8 @@
 
     HDFS-727. bug setting block size hdfsOpenFile (Eli Collins via cos)
 
+    HDFS-756. libhdfs unit tests do not run. (Eli Collins via cos)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/c%2B%2B/libhdfs/tests/test-libhdfs.sh?rev=882733&r1=882732&r2=882733&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh (original)
+++ hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh Fri Nov 20 22:06:41 2009
@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
@@ -31,6 +32,24 @@
 HADOOP_LIB_DIR=$HADOOP_HOME/lib
 HADOOP_BIN_DIR=$HADOOP_HOME/bin
 
+COMMON_BUILD_DIR=$HADOOP_HOME/build/ivy/lib/Hadoop-Hdfs/common
+COMMON_JAR=$COMMON_BUILD_DIR/hadoop-core-0.22.0-SNAPSHOT.jar
+
+# If we are running from the hdfs repo we need to create HADOOP_BIN_DIR.
+# If the bin directory does not exist and we've got a core jar, extract its
+# bin directory to HADOOP_HOME/bin. The bin scripts hdfs-config.sh and
+# hadoop-config.sh assume the bin directory is named "bin" and that it
+# is located in HADOOP_HOME.
+created_bin_dir=0
+if [ ! -d $HADOOP_BIN_DIR ]; then
+  if [ -f $COMMON_JAR ]; then
+    mkdir $HADOOP_BIN_DIR
+    jar xf $COMMON_JAR bin.tgz
+    tar xfz bin.tgz -C $HADOOP_BIN_DIR
+    created_bin_dir=1
+  fi
+fi
+
 # Manipulate HADOOP_CONF_DIR too
 # which is necessary to circumvent bin/hadoop
 HADOOP_CONF_DIR=$HADOOP_CONF_DIR:$HADOOP_HOME/conf
@@ -61,17 +80,18 @@
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-for ff in $HADOOP_HOME/*.jar; do 
-  CLASSPATH=${CLASSPATH}:$ff
+for f in $HADOOP_HOME/*.jar; do 
+  CLASSPATH=${CLASSPATH}:$f
 done
-for f in $HADOOP_HOME/lib/jsp-2.0/*.jar; do
+for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
-for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
+if [ -d "$COMMON_BUILD_DIR" ]; then
+  CLASSPATH=$CLASSPATH:$COMMON_JAR
+  for f in $COMMON_BUILD_DIR/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
 fi
 
 # restore ordinary behaviour
@@ -115,16 +135,19 @@
 # Put delays to ensure hdfs is up and running and also shuts down 
 # after the tests are complete
 cd $HADOOP_HOME
-echo Y | $HADOOP_BIN_DIR/hadoop namenode -format &&
-$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start namenode && sleep 2 && 
-$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start datanode && sleep 2 && 
+echo Y | $HADOOP_BIN_DIR/hdfs namenode -format &&
+$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start namenode && sleep 2
+$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start datanode && sleep 2
 sleep 20
-echo CLASSPATH=$HADOOP_CONF_DIR:$CLASSPATH LD_PRELOAD="$LIBHDFS_INSTALL_DIR/libhdfs.so:$LIB_JVM_DIR/libjvm.so" $LIBHDFS_BUILD_DIR/$HDFS_TEST && 
-CLASSPATH=$HADOOP_CONF_DIR:$CLASSPATH LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so:" $LIBHDFS_BUILD_DIR/$HDFS_TEST
+CLASSPATH=$CLASSPATH LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so:" $LIBHDFS_BUILD_DIR/$HDFS_TEST
 BUILD_STATUS=$?
 sleep 3
-$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs stop datanode && sleep 2 && 
+$HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs stop datanode && sleep 2
 $HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs stop namenode && sleep 2 
 
+if [ $created_bin_dir -eq 1 ]; then
+  rm -rf bin.tgz $HADOOP_BIN_DIR 
+fi
+
 echo exiting with $BUILD_STATUS
 exit $BUILD_STATUS



Mime
View raw message