hbase-commits mailing list archives

From st...@apache.org
Subject svn commit: r618560 - in /hadoop/hbase/trunk: bin/hbase bin/start-hbase.sh conf/hbase-env.sh conf/log4j.properties
Date Tue, 05 Feb 2008 06:15:45 GMT
Author: stack
Date: Mon Feb  4 22:15:44 2008
New Revision: 618560

URL: http://svn.apache.org/viewvc?rev=618560&view=rev
Log:
HBASE-403 Fix build after move of hbase in svn
Make it so the scripts basically work again.  Make up a basic log4j
properties file (currently broken).
M    trunk/conf/hbase-env.sh
    Put a JAVA_HOME setting in here.  It needs to be set here now instead of
    up in hadoop-env.sh (see the sketch after this file list).
A    trunk/conf/log4j.properties
    Basic log4j for hbase.
M    trunk/bin/hbase
    Changed references that pointed up into hadoop to instead point at their
    hbase substitutions.
M    trunk/bin/start-hbase.sh
    Remove the wait on dfs for the moment.  Need to figure out how to do it now
    that we're detached (besides, there is talk that we'd actually have the
    servers come up even if they can't talk to dfs).
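
A minimal sketch of what the hbase-env.sh change means for a local setup; the
JDK path simply mirrors the commented example in the new hbase-env.sh template
and is illustrative only:

    # conf/hbase-env.sh -- now sourced directly by bin/hbase, so define
    # JAVA_HOME here rather than relying on hadoop-env.sh (example path only)
    export JAVA_HOME=/usr/lib/j2sdk1.5-sun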

Added:
    hadoop/hbase/trunk/conf/log4j.properties
Modified:
    hadoop/hbase/trunk/bin/hbase
    hadoop/hbase/trunk/bin/start-hbase.sh
    hadoop/hbase/trunk/conf/hbase-env.sh

Modified: hadoop/hbase/trunk/bin/hbase
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/bin/hbase?rev=618560&r1=618559&r2=618560&view=diff
==============================================================================
--- hadoop/hbase/trunk/bin/hbase (original)
+++ hadoop/hbase/trunk/bin/hbase Mon Feb  4 22:15:44 2008
@@ -36,12 +36,12 @@
 #
 #   HBASE_CONF_DIR   Alternate conf dir. Default is ${HBASE_HOME}/conf.
 #
+#   HBASE_ROOT_LOGGER The root appender. Default is INFO,console
+#
 #   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 #
 #   HADOOP_HOME      Hadoop home directory.
 #
-#   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
-#
 
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
@@ -74,10 +74,9 @@
 COMMAND=$1
 shift
 
-# Source the hadoop-env.sh.  Will have JAVA_HOME defined. There is no
-# hbase-env.sh as yet.
-if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
-  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
+# Source the hbase-env.sh.  Will have JAVA_HOME defined.
+if [ -f "${HBASE_CONF_DIR}/hbase-env.sh" ]; then
+  . "${HBASE_CONF_DIR}/hbase-env.sh"
 fi
 
 # some Java parameters
@@ -109,68 +108,47 @@
 CLASSPATH="${CLASSPATH}:${HBASE_CONF_DIR}"
 CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
 
-# for developers, add hbase and hadoop classes to CLASSPATH
-if [ -d "$HADOOP_HOME/build/contrib/hbase/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/contrib/hbase/classes
-fi
-if [ -d "$HADOOP_HOME/build/contrib/hbase/test" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/contrib/hbase/test
-fi
-if [ -d "$HADOOP_HOME/build/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
-fi
-if [ -d "$HADOOP_HOME/build/contrib/hbase/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/contrib/hbase/
+# for developers, add hbase classes to CLASSPATH
+if [ -d "$HBASE_HOME/build/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HBASE_HOME/build/classes
 fi
-if [ -d "$HADOOP_HOME/build/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
+if [ -d "$HBASE_HOME/build/test" ]; then
+  CLASSPATH=${CLASSPATH}:$HBASE_HOME/build/test
 fi
-if [ -d "$HADOOP_HOME/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
+if [ -d "$HBASE_HOME/build/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HBASE_HOME/build/webapps
 fi
 
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
-# for releases, add core hbase, hadoop jar & webapps to CLASSPATH
-# Look in two places for our hbase jar.
-for f in $HBASE_HOME/hadoop-*-hbase*.jar; do
+# for releases, add hbase, hadoop & webapps to CLASSPATH
+for f in $HBASE_HOME/hbase*.jar; do
   if [ -f $f ]; then
     CLASSPATH=${CLASSPATH}:$f;
   fi
 done
-for f in $HADOOP_HOME/contrib/hadoop-*-hbase*.jar; do
-  if [ -f $f ]; then
-    CLASSPATH=${CLASSPATH}:$f;
-  fi
-done
-if [ -d "$HADOOP_HOME/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME
+if [ -d "$HBASE_HOME/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HBASE_HOME
 fi
-for f in $HADOOP_HOME/hadoop-*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
 
-# add hbase and hadoop libs to CLASSPATH
+# add libs to CLASSPATH
 for f in $HBASE_HOME/lib/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
-for f in $HADOOP_HOME/lib/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
 
-for f in $HADOOP_HOME/lib/jetty-ext/*.jar; do
+for f in $HBASE_HOME/lib/jetty-ext/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
 
 # default log directory & file
 # TODO: Should we log to hadoop or under hbase?
-if [ "$HADOOP_LOG_DIR" = "" ]; then
-  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+if [ "$HBASE_LOG_DIR" = "" ]; then
+  HBASE_LOG_DIR="$HBASE_HOME/logs"
 fi
-if [ "$HADOOP_LOGFILE" = "" ]; then
-  HADOOP_LOGFILE='hbase.log'
+if [ "$HBASE_LOGFILE" = "" ]; then
+  HBASE_LOGFILE='hbase.log'
 fi
 
 # cygwin path translation
@@ -181,26 +159,6 @@
   HADOOP_LOG_DIR=`cygpath -d "$HADOOP_LOG_DIR"`
 fi
 
-# TODO: Can this be put into separate script so don't have to duplicate
-# hadoop command script code?
-# setup 'java.library.path' for native-hadoop code if necessary
-JAVA_LIBRARY_PATH=''
-if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
-  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
-  
-  if [ -d "$HADOOP_HOME/build/native" ]; then
-    JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
-  fi
-  
-  if [ -d "${HADOOP_HOME}/lib/native" ]; then
-    if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
-      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
-    else
-      JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
-    fi
-  fi
-fi
-
 # cygwin path translation
 if $cygwin; then
   JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
@@ -235,7 +193,7 @@
 HBASE_OPTS="$HBASE_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
 HBASE_OPTS="$HBASE_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
 HBASE_OPTS="$HBASE_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
-HBASE_OPTS="$HBASE_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
+HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"
 HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
 if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
   HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"

Modified: hadoop/hbase/trunk/bin/start-hbase.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/bin/start-hbase.sh?rev=618560&r1=618559&r2=618560&view=diff
==============================================================================
--- hadoop/hbase/trunk/bin/start-hbase.sh (original)
+++ hadoop/hbase/trunk/bin/start-hbase.sh Mon Feb  4 22:15:44 2008
@@ -32,7 +32,7 @@
 . "$bin"/hbase-config.sh
 
 # start hbase daemons
-"${HADOOP_HOME}"/bin/hadoop dfsadmin -safemode wait
+# TODO: PUT BACK !!! "${HADOOP_HOME}"/bin/hadoop dfsadmin -safemode wait
 errCode=$?
 if [ $errCode -ne 0 ]
 then
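
One way the commented-out wait could eventually be restored without breaking a
detached start, as the log message above contemplates; this is only a sketch,
not part of this commit:

    # Only block on HDFS safe mode when a hadoop install is actually reachable;
    # otherwise let the hbase daemons come up detached.
    if [ -n "${HADOOP_HOME}" ] && [ -x "${HADOOP_HOME}/bin/hadoop" ]; then
      "${HADOOP_HOME}/bin/hadoop" dfsadmin -safemode wait
    fi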

Modified: hadoop/hbase/trunk/conf/hbase-env.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/hbase-env.sh?rev=618560&r1=618559&r2=618560&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/hbase-env.sh (original)
+++ hadoop/hbase/trunk/conf/hbase-env.sh Mon Feb  4 22:15:44 2008
@@ -19,7 +19,10 @@
 # * limitations under the License.
 # */
 
-# Set HBase-specific environment variables here.
+# Set environment variables here.
+
+# The java implementation to use.  Required.
+# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HBASE_CLASSPATH=

Added: hadoop/hbase/trunk/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/log4j.properties?rev=618560&view=auto
==============================================================================
--- hadoop/hbase/trunk/conf/log4j.properties (added)
+++ hadoop/hbase/trunk/conf/log4j.properties Mon Feb  4 22:15:44 2008
@@ -0,0 +1,43 @@
+# Define some default values that can be overridden by system properties
+hbase.root.logger=INFO,console
+hbase.log.dir=.
+hbase.log.file=hbase.log
+
+# Define the root logger to the system property "hbase.root.logger".
+log4j.rootLogger=${hbase.root.logger}
+
+# Logging Threshold
+log4j.threshold=ALL
+
+#
+# Daily Rolling File Appender
+#
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
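
Because log4j.rootLogger is taken from the hbase.root.logger system property,
output can be routed to the DailyRollingFileAppender defined above without
editing this file; a usage sketch (the trailing command argument is a
placeholder, not an actual hbase command name):

    # Send logging to ${hbase.log.dir}/${hbase.log.file} (./hbase.log by
    # default) instead of the console.
    HBASE_ROOT_LOGGER="INFO,DRFA" "${HBASE_HOME}/bin/hbase" <command>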


