hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tomwh...@apache.org
Subject svn commit: r925495 - in /hadoop/hdfs/trunk: ./ src/c++/libhdfs/tests/ src/c++/libhdfs/tests/conf/
Date Sat, 20 Mar 2010 00:11:51 GMT
Author: tomwhite
Date: Sat Mar 20 00:11:50 2010
New Revision: 925495

URL: http://svn.apache.org/viewvc?rev=925495&view=rev
Log:
HDFS-939. libhdfs test is broken. Contributed by Eli Collins.

Removed:
    hadoop/hdfs/trunk/src/c++/libhdfs/tests/conf/core-site.xml
    hadoop/hdfs/trunk/src/c++/libhdfs/tests/conf/hadoop-site.xml
    hadoop/hdfs/trunk/src/c++/libhdfs/tests/conf/hdfs-site.xml
    hadoop/hdfs/trunk/src/c++/libhdfs/tests/conf/mapred-site.xml
    hadoop/hdfs/trunk/src/c++/libhdfs/tests/conf/slaves
Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/build.xml
    hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=925495&r1=925494&r2=925495&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Sat Mar 20 00:11:50 2010
@@ -205,6 +205,8 @@ Trunk (unreleased changes)
 
     HDFS-1015. Fix intermittent failure in TestSecurityTokenEditLog.
     (Jitendra Nath Pandey via suresh)
+
+    HDFS-939. libhdfs test is broken. (Eli Collins via tomwhite)
     
 Release 0.21.0 - Unreleased
 

Modified: hadoop/hdfs/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/build.xml?rev=925495&r1=925494&r2=925495&view=diff
==============================================================================
--- hadoop/hdfs/trunk/build.xml (original)
+++ hadoop/hdfs/trunk/build.xml Sat Mar 20 00:11:50 2010
@@ -97,7 +97,6 @@
   <property name="test.hdfs.commit.tests.file" value="${test.src.dir}/commit-tests" />
   <property name="test.hdfs.all.tests.file" value="${test.src.dir}/all-tests" />
 
-  <property name="test.libhdfs.conf.dir" value="${c++.libhdfs.src}/tests/conf"/>
   <property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/>
 
   <property name="web.src.dir" value="${basedir}/src/web"/>
@@ -1022,6 +1021,7 @@
 <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core" if="islibhdfs" unless="clover.enabled">
     <delete dir="${test.libhdfs.dir}"/>
     <mkdir dir="${test.libhdfs.dir}"/>
+    <mkdir dir="${test.libhdfs.dir}/conf"/>
     <mkdir dir="${test.libhdfs.dir}/logs"/>
     <mkdir dir="${test.libhdfs.dir}/hdfs/name"/>
 
@@ -1031,8 +1031,9 @@
         <env key="JVM_ARCH" value="${jvm.arch}"/>
         <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
         <env key="HADOOP_HOME" value="${basedir}"/>
-        <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/>
+        <env key="HADOOP_CONF_DIR" value="${test.libhdfs.dir}/conf"/>
         <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
+        <env key="LIBHDFS_TEST_DIR" value="${test.libhdfs.dir}"/>
         <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/>
         <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>  
         <env key="LIB_DIR" value="${common.ivy.lib.dir}"/>

Modified: hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/c%2B%2B/libhdfs/tests/test-libhdfs.sh?rev=925495&r1=925494&r2=925495&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh (original)
+++ hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh Sat Mar 20 00:11:50 2010
@@ -36,6 +36,44 @@ HADOOP_BIN_DIR=$HADOOP_HOME/bin
 COMMON_BUILD_DIR=$HADOOP_HOME/build/ivy/lib/Hadoop-Hdfs/common
 COMMON_JAR=$COMMON_BUILD_DIR/hadoop-core-0.22.0-SNAPSHOT.jar
 
+cat > $HADOOP_CONF_DIR/core-site.xml <<EOF
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+<property>
+  <name>hadoop.tmp.dir</name>
+  <value>file:///$LIBHDFS_TEST_DIR</value>
+</property>
+<property>
+  <name>fs.default.name</name>
+  <value>hdfs://localhost:23000/</value>
+</property>
+</configuration>
+EOF
+
+cat > $HADOOP_CONF_DIR/hdfs-site.xml <<EOF
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+<property>
+  <name>dfs.replication</name>
+  <value>1</value>
+</property>
+<property>
+  <name>dfs.support.append</name>
+  <value>true</value>
+</property>
+<property>
+  <name>dfs.namenode.logging.level</name>
+  <value>DEBUG</value>
+</property>
+</configuration>
+EOF
+
+cat > $HADOOP_CONF_DIR/slaves <<EOF
+localhost
+EOF
+
 # If we are running from the hdfs repo we need to create HADOOP_BIN_DIR.  
 # If the bin directory does not exist and we've got a core jar, extract its
 # bin directory to HADOOP_HOME/bin. The bin scripts hdfs-config.sh and
@@ -142,7 +180,8 @@ cd $HADOOP_HOME
 echo Y | $HADOOP_BIN_DIR/hdfs namenode -format &&
 $HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start namenode && sleep 2
 $HADOOP_BIN_DIR/hadoop-daemon.sh --script $HADOOP_BIN_DIR/hdfs start datanode && sleep 2
-sleep 20
+echo "Wait 30s for the datanode to start up..."
+sleep 30
 CLASSPATH=$CLASSPATH LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so:" $LIBHDFS_BUILD_DIR/$HDFS_TEST
 BUILD_STATUS=$?
 sleep 3



Mime
View raw message