hadoop-common-commits mailing list archives

From cdoug...@apache.org
Subject svn commit: r748770 [1/2] - in /hadoop/core/trunk: ./ src/contrib/hdfsproxy/ src/contrib/hdfsproxy/bin/ src/contrib/hdfsproxy/conf/ src/contrib/hdfsproxy/ivy/ src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ src/contrib/hdfsproxy/src/test/or...
Date Sat, 28 Feb 2009 01:58:21 GMT
Author: cdouglas
Date: Sat Feb 28 01:58:20 2009
New Revision: 748770

URL: http://svn.apache.org/viewvc?rev=748770&view=rev
Log:
HADOOP-5023. Add Tomcat support to HdfsProxy. Contributed by Zhiyong Zhang

Added:
    hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-server.sh
    hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-servers.sh
    hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-slaves.sh
    hadoop/core/trunk/src/contrib/hdfsproxy/bin/proxy-util
    hadoop/core/trunk/src/contrib/hdfsproxy/bin/start-hdfsproxy-tomcat.sh
    hadoop/core/trunk/src/contrib/hdfsproxy/bin/stop-hdfsproxy-tomcat.sh
    hadoop/core/trunk/src/contrib/hdfsproxy/conf/ssl-server.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyFilter.java
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-hosts
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/log4j.properties
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/ssl-client.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/ssl-server.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-certs.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-permissions.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/ssl-keys/
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/ssl-keys/client.keystore   (with props)
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/ssl-keys/proxy.keystore   (with props)
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/ssl-keys/test.crt
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-config/
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-config/server.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-config/tomcat-users.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-config/web.xml
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/hdfsproxy/README
    hadoop/core/trunk/src/contrib/hdfsproxy/build.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-certs.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-permissions.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/ivy.xml
    hadoop/core/trunk/src/contrib/hdfsproxy/ivy/libraries.properties
    hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java
    hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Sat Feb 28 01:58:20 2009
@@ -145,6 +145,8 @@
 
     HADOOP-4546. Fix DF reporting for AIX. (Bill Habermaas via cdouglas)
 
+    HADOOP-5023. Add Tomcat support to HdfsProxy. (Zhiyong Zhang via cdouglas)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/README
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/README?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/README (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/README Sat Feb 28 01:58:20 2009
@@ -28,3 +28,24 @@
 user-certs.xml and user-permissions.xml files on all proxy servers listed in 
 the hdfsproxy-hosts file. Similarly, "hdfsproxy -clearUgiCache" command can be 
 used to clear the UGI caches on all proxy servers.
+
+For a Tomcat-based installation:
+1. Set up the environment and configuration files.
+	 a) export HADOOP_CONF_DIR=${user.home}/devel/source-conf
+	 	The source-conf directory should point to the source cluster's configuration
+	 	directory, where core-site.xml and hdfs-site.xml should already be correctly
+	 	configured for the source cluster settings.
+	 b) export HDFSPROXY_CONF_DIR=${user.home}/devel/proxy-conf
+	  The proxy-conf directory should point to the proxy's configuration directory,
+	  where hdfsproxy-default.xml, etc., should already be properly configured.
+
+2. cd into the hdfsproxy directory and run "ant war".
+
+3. Download and install Tomcat 6, then change Tomcat's conf/server.xml to include
+	 https support: uncomment the Connector below "SSL HTTP/1.1 Connector" and add the keystore paths, so that the result looks like this:
+	 <Connector port="8443" protocol="HTTP/1.1" SSLEnabled="true"
+               maxThreads="150" scheme="https" secure="true" keystoreFile="${user.home}/grid/hdfsproxy-conf/server2.keystore" 
+               keystorePass="changeme" keystoreType="JKS"  clientAuth="true" sslProtocol="TLS" />
+4. Copy the war file from step 2 to Tomcat's webapps directory and rename it to ROOT.war.
+5. export JAVA_OPTS="-Djavax.net.ssl.trustStore=${user.home}/grid/hdfsproxy-conf/server2.keystore -Djavax.net.ssl.trustStorePassword=changeme"
+6. Start up Tomcat with Tomcat's bin/startup.sh. (A condensed shell sketch follows this diff.)
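For quick reference, the six steps above condense into a shell session like the
following sketch (paths, TOMCAT_HOME, and the war file location are illustrative
and should match your own layout):

    export HADOOP_CONF_DIR=$HOME/devel/source-conf
    export HDFSPROXY_CONF_DIR=$HOME/devel/proxy-conf
    ant war                                  # run from src/contrib/hdfsproxy
    # ... edit $TOMCAT_HOME/conf/server.xml to enable the SSL connector ...
    cp <build dir>/hdfsproxy-1.0.war $TOMCAT_HOME/webapps/ROOT.war
    export JAVA_OPTS="-Djavax.net.ssl.trustStore=$HOME/grid/hdfsproxy-conf/server2.keystore -Djavax.net.ssl.trustStorePassword=changeme"
    $TOMCAT_HOME/bin/startup.sh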

Added: hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-server.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-server.sh?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-server.sh (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-server.sh Sat Feb 28 01:58:20 2009
@@ -0,0 +1,92 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Runs the HdfsProxy Tomcat server as a daemon.
+#
+# Environment Variables
+#
+#   HDFSPROXY_CONF_DIR  Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
+#   HDFSPROXY_MASTER    host:path where hdfsproxy code should be rsync'd from
+#   HDFSPROXY_PID_DIR   Where the pid files are stored. /tmp by default.
+#   HDFSPROXY_IDENT_STRING   A string representing this instance of hdfsproxy. $USER by default
+#   HDFSPROXY_NICENESS  The scheduling priority for daemons. Defaults to 0.
+#   TOMCAT_HOME_DIR     The Tomcat home directory.
+##
+
+usage="Usage: hdfsproxy-tomcat-server.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) "
+
+# if no args specified, show usage
+if [ $# -le 1 ]; then
+  echo $usage
+  exit 1
+fi
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hdfsproxy-config.sh
+
+# get arguments
+startStop=$1
+shift
+
+
+if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
+  . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
+fi
+
+
+if [ "$HDFSPROXY_IDENT_STRING" = "" ]; then
+  export HDFSPROXY_IDENT_STRING="$USER"
+fi
+
+
+# Set default scheduling priority
+if [ "$HDFSPROXY_NICENESS" = "" ]; then
+    export HDFSPROXY_NICENESS=0
+fi
+
+case $startStop in
+
+  (start)
+    if [ "$HDFSPROXY_MASTER" != "" ]; then
+      echo rsync from $HDFSPROXY_MASTER
+      rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $HDFSPROXY_MASTER/ "$HDFSPROXY_HOME"
+    fi
+
+    echo starting hdfsproxy tomcat server
+    cd "$HDFSPROXY_HOME"
+    nohup nice -n $HDFSPROXY_NICENESS "$TOMCAT_HOME_DIR"/bin/startup.sh >& /dev/null &
+    sleep 1
+    ;;
+          
+  (stop)
+
+    echo stopping hdfsproxy tomcat server
+    cd "$HDFSPROXY_HOME"
+    nohup nice -n $HDFSPROXY_NICENESS "$TOMCAT_HOME_DIR"/bin/shutdown.sh >& /dev/null &
+    ;;
+
+  (*)
+    echo $usage
+    exit 1
+    ;;
+
+esac
+
+
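A typical single-host invocation, assuming TOMCAT_HOME_DIR is set in
hdfsproxy-env.sh and HDFSPROXY_CONF_DIR points at the proxy configuration:

    bin/hdfsproxy-tomcat-server.sh --config $HDFSPROXY_CONF_DIR start
    bin/hdfsproxy-tomcat-server.sh --config $HDFSPROXY_CONF_DIR stop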

Added: hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-servers.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-servers.sh?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-servers.sh (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-servers.sh Sat Feb 28 01:58:20 2009
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Run an HdfsProxy Tomcat command (start or stop) on all slave hosts.
+
+usage="Usage: hdfsproxy-tomcat-servers.sh [--config confdir] [--hosts hostlistfile] (start|stop) "
+
+# if no args specified, show usage
+if [ $# -le 1 ]; then
+  echo $usage
+  exit 1
+fi
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. $bin/hdfsproxy-config.sh
+
+exec "$bin/hdfsproxy-tomcat-slaves.sh" --config $HDFSPROXY_CONF_DIR cd "$HDFSPROXY_HOME" \; "$bin/hdfsproxy-tomcat-server.sh" --config $HDFSPROXY_CONF_DIR "$@"

Added: hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-slaves.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-slaves.sh?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-slaves.sh (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-slaves.sh Sat Feb 28 01:58:20 2009
@@ -0,0 +1,68 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Run a shell command on all slave hosts.
+#
+# Environment Variables
+#
+#   HDFSPROXY_SLAVES    File naming remote hosts.
+#     Default is ${HDFSPROXY_CONF_DIR}/hdfsproxy-hosts.
+#   HDFSPROXY_CONF_DIR  Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
+#   HDFSPROXY_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
+#   HDFSPROXY_SSH_OPTS Options passed to ssh when running remote commands.
+##
+
+usage="Usage: hdfsproxy-tomcat-slaves.sh [--config confdir] command..."
+
+# if no args specified, show usage
+if [ $# -le 0 ]; then
+  echo $usage
+  exit 1
+fi
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hdfsproxy-config.sh
+
+# If the slaves file is specified in the command line,
+# then it takes precedence over the definition in 
+# hdfsproxy-env.sh. Save it here.
+HOSTLIST=$HDFSPROXY_SLAVES
+
+if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
+  . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
+fi
+
+if [ "$HOSTLIST" = "" ]; then
+  if [ "$HDFSPROXY_SLAVES" = "" ]; then
+    export HOSTLIST="${HDFSPROXY_CONF_DIR}/hdfsproxy-hosts"
+  else
+    export HOSTLIST="${HDFSPROXY_SLAVES}"
+  fi
+fi
+
+for slave in `cat "$HOSTLIST"`; do
+ ssh $HDFSPROXY_SSH_OPTS $slave $"${@// /\\ }" \
+   2>&1 | sed "s/^/$slave: /" & 
+ if [ "$HDFSPROXY_SLAVE_SLEEP" != "" ]; then
+   sleep $HDFSPROXY_SLAVE_SLEEP
+ fi
+done
+
+wait
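Because the loop simply runs an arbitrary command over ssh, the script also
works for ad-hoc checks; a sketch, assuming passwordless ssh to each host (the
ConnectTimeout option is illustrative):

    # print the Java version on every proxy host, prefixed with the host name
    HDFSPROXY_SSH_OPTS="-o ConnectTimeout=5" \
      bin/hdfsproxy-tomcat-slaves.sh --config $HDFSPROXY_CONF_DIR java -version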

Added: hadoop/core/trunk/src/contrib/hdfsproxy/bin/proxy-util
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/bin/proxy-util?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/bin/proxy-util (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/bin/proxy-util Sat Feb 28 01:58:20 2009
@@ -0,0 +1,152 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The Proxy command utility script
+#
+# Environment Variables
+#
+#   JAVA_HOME        The java implementation to use.
+#
+#   HDFSPROXY_CLASSPATH Extra Java CLASSPATH entries.
+#
+#   HDFSPROXY_HEAPSIZE  The maximum amount of heap to use, in MB. 
+#                    Default is 1000.
+#
+#   HDFSPROXY_OPTS      Extra Java runtime options.
+#   
+#   HDFSPROXY_NAMENODE_OPTS       These options are added to HDFSPROXY_OPTS 
+#   HDFSPROXY_CLIENT_OPTS         when the respective command is run.
+#   HDFSPROXY_{COMMAND}_OPTS etc  Options for a specific command; for
+#                              example, HDFSPROXY_CLIENT_OPTS applies to
+#                              more than one command (fs, dfs, fsck,
+#                              dfsadmin, etc.)
+#
+#   HDFSPROXY_CONF_DIR  Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
+#
+#   HDFSPROXY_ROOT_LOGGER The root appender. Default is INFO,console
+#
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hdfsproxy-config.sh
+
+cygwin=false
+case "`uname`" in
+CYGWIN*) cygwin=true;;
+esac
+
+if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
+  . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
+fi
+
+# some Java parameters
+if [ "$JAVA_HOME" != "" ]; then
+  #echo "run java in $JAVA_HOME"
+  JAVA_HOME=$JAVA_HOME
+fi
+  
+if [ "$JAVA_HOME" = "" ]; then
+  echo "Error: JAVA_HOME is not set."
+  exit 1
+fi
+
+JAVA=$JAVA_HOME/bin/java
+JAVA_HEAP_MAX=-Xmx1000m 
+
+# check envvars which might override default args
+if [ "$HDFSPROXY_HEAPSIZE" != "" ]; then
+  #echo "run with heapsize $HDFSPROXY_HEAPSIZE"
+  JAVA_HEAP_MAX="-Xmx""$HDFSPROXY_HEAPSIZE""m"
+  #echo $JAVA_HEAP_MAX
+fi
+
+# CLASSPATH initially contains $HADOOP_CONF_DIR and $HDFSPROXY_CONF_DIR
+CLASSPATH="${HADOOP_CONF_DIR}"
+CLASSPATH="${CLASSPATH}:${HDFSPROXY_CONF_DIR}"
+CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
+
+# for developers, add HdfsProxy classes to CLASSPATH
+if [ -d "$HDFSPROXY_HOME/build/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/classes
+fi
+if [ -d "$HDFSPROXY_HOME/build/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build
+fi
+if [ -d "$HDFSPROXY_HOME/build/test/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/test/classes
+fi
+
+# so that filenames w/ spaces are handled correctly in loops below
+IFS=
+
+# for releases, add hdfsproxy jar & webapps to CLASSPATH
+if [ -d "$HDFSPROXY_HOME/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME
+fi
+for f in $HDFSPROXY_HOME/hdfsproxy-*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add libs to CLASSPATH
+for f in $HDFSPROXY_HOME/lib/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add user-specified CLASSPATH last
+if [ "$HDFSPROXY_CLASSPATH" != "" ]; then
+  CLASSPATH=${CLASSPATH}:${HDFSPROXY_CLASSPATH}
+fi
+
+# default log directory & file
+if [ "$HDFSPROXY_LOG_DIR" = "" ]; then
+  HDFSPROXY_LOG_DIR="$HDFSPROXY_HOME/logs"
+fi
+if [ "$HDFSPROXY_LOGFILE" = "" ]; then
+  HDFSPROXY_LOGFILE='proxy-util.log'
+fi
+
+# restore ordinary behaviour
+unset IFS
+
+# figure out which class to run
+CLASS='org.apache.hadoop.hdfsproxy.ProxyUtil'
+
+# cygwin path translation
+if $cygwin; then
+  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+  HDFSPROXY_HOME=`cygpath -d "$HDFSPROXY_HOME"`
+  HDFSPROXY_LOG_DIR=`cygpath -d "$HDFSPROXY_LOG_DIR"`
+fi
+
+# cygwin path translation
+if $cygwin; then
+  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
+fi
+
+HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.log.dir=$HDFSPROXY_LOG_DIR"
+HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.log.file=$HDFSPROXY_LOGFILE"
+HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.home.dir=$HDFSPROXY_HOME"
+HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.id.str=$HDFSPROXY_IDENT_STRING"
+HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.root.logger=${HDFSPROXY_ROOT_LOGGER:-INFO,console}"
+if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
+  HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
+fi  
+
+# run it
+exec "$JAVA" $JAVA_HEAP_MAX $HDFSPROXY_OPTS -classpath "$CLASSPATH" $CLASS "$@"

Added: hadoop/core/trunk/src/contrib/hdfsproxy/bin/start-hdfsproxy-tomcat.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/bin/start-hdfsproxy-tomcat.sh?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/bin/start-hdfsproxy-tomcat.sh (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/bin/start-hdfsproxy-tomcat.sh Sat Feb 28 01:58:20 2009
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Start hdfsproxy tomcat servers.
+# Run this on the master node.
+
+usage="Usage: start-hdfsproxy-tomcat.sh"
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hdfsproxy-config.sh
+
+# get arguments
+if [ $# -ge 1 ]; then
+  echo $usage
+  exit 1
+fi
+
+# start hdfsproxy tomcat servers
+"$bin"/hdfsproxy-tomcat-servers.sh --config $HDFSPROXY_CONF_DIR --hosts hdfsproxy-hosts start

Added: hadoop/core/trunk/src/contrib/hdfsproxy/bin/stop-hdfsproxy-tomcat.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/bin/stop-hdfsproxy-tomcat.sh?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/bin/stop-hdfsproxy-tomcat.sh (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/bin/stop-hdfsproxy-tomcat.sh Sat Feb 28 01:58:20 2009
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Stop hdfsproxy tomcat servers.  Run this on the master node.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hdfsproxy-config.sh
+
+# "$bin"/hdfsproxy-daemon.sh --config $HDFSPROXY_CONF_DIR stop
+"$bin"/hdfsproxy-tomcat-servers.sh --config $HDFSPROXY_CONF_DIR --hosts hdfsproxy-hosts stop
+

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/build.xml?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/build.xml (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/build.xml Sat Feb 28 01:58:20 2009
@@ -17,14 +17,63 @@
    limitations under the License.
 -->
 
-<project name="hdfsproxy" default="jar">
+<project name="hdfsproxy" default="jar" xmlns:ivy="antlib:org.apache.ivy.ant">
 	<property name="hdfsproxyVersion" value="1.0"/>
 	<property name="final.name" value="${ant.project.name}-${hdfsproxyVersion}"/>
+	<property name="javac.debug" value="on"/>
+	<property name="javac.optimize" value="on"/>
+	<import file="../build-contrib.xml"/>
+	
 	<property name="bin.dir" value="${basedir}/bin"/>
 	<property name="lib.dir" value="${basedir}/lib"/>
-	<property name="conf.dir" value="${basedir}/conf"/>
+	<property name="hadoop.jars.dir" value="${basedir}/hadoopjars"/>
+	
 	<property name="docs.dir" value="${basedir}/docs"/>
-	<import file="../build-contrib.xml"/>
+	<property name="test.build.dir" value="${build.dir}/test"/>
+	<property name="test.build.classes" value="${test.build.dir}/classes"/>	
+	<property name="src.test.resources" value="${basedir}/src/test/resources"/>
+	<property name="ssl.keystore.proxy" value="${src.test.resources}/ssl-keys/proxy.keystore"/>
+	<property name="ssl.keystore.client" value="${src.test.resources}/ssl-keys/client.keystore"/>
+	<property name="ssl.client.cert" value="${src.test.resources}/ssl-keys/test.crt"/>
+	<property name="proxy.conf.test" value="${src.test.resources}/proxy-config"/>
+	<property name="tomcat.conf.test" value="${src.test.resources}/tomcat-config"/>
+	<property name="target.dir" value="${build.dir}/target"/>
+	<property name="logs.dir" value="${target.dir}/logs"/>
+	<property name="reports.dir" value="${target.dir}/reports"/>
+	<property name="tomcatconfig.dir" value="${target.dir}/tomcat-config"/>
+	<property name="tomcat.container.id" value="tomcat5x"/>
+	<property name="cargo.servlet.port" value="8087"/>
+	<property name="cargo.logging" value="high"/>
+	<property name="cactus.formatter.type" value="xml"/>
+	<property name="cactus.warfile.name" value="test"/>
+	<property environment="env"/>
+	
+	<!-- check if environment has been set -->
+  <condition property="source.conf.dir" value="${env.HADOOP_CONF_DIR}" else="${basedir}/conf">
+    <and>
+        <isset property="env.HADOOP_CONF_DIR"/>
+        <available file="${env.HADOOP_CONF_DIR}/core-site.xml"/>        
+    </and>
+  </condition>
+  <condition property="proxy.conf.dir" value="${env.HDFSPROXY_CONF_DIR}" else="${basedir}/conf">
+    <and>
+        <isset property="env.HDFSPROXY_CONF_DIR"/>
+        <available file="${env.HDFSPROXY_CONF_DIR}/hdfsproxy-default.xml"/>
+    </and>
+  </condition>
+	<property name="ivy.settings.file" location="${hadoop.root}/ivy/ivysettings.xml"/>
+  
+  <target name="ivy-init" depends="ivy-init-antlib">
+    		<ivy:settings id="${ant.project.name}.ivy.settings"/>
+	</target>
+	
+	<!-- Define the Cactus tasks -->	
+	<target name="load-tasks" depends="ivy-retrieve-common">
+		<taskdef resource="cactus.tasks"
+			 classpathref="cactus.classpath">
+		</taskdef>		
+	</target>
+	
 
 	<target name="jar" depends="compile" description="Create jar">
 		<echo>
@@ -44,6 +93,124 @@
 			</fileset>
 		</jar>
 	</target>
+	
+	
+	<!-- ================================================================== -->
+	<!-- Make war file                                              -->
+	<!-- ================================================================== -->
+	
+	<target name="war" depends="local-package" description="Create war">
+		<echo>
+			Building the .war file
+		</echo>
+	  <war destfile="${build.dir}/${final.name}.war" webxml="${basedir}/conf/tomcat-web.xml">
+	    <lib dir="${common.ivy.lib.dir}">
+	      <include name="commons-logging-${commons-logging.version}.jar"/>
+        <include name="junit-${junit.version}.jar"/>
+        <include name="log4j-${log4j.version}.jar"/>
+        <include name="slf4j-api-${slf4j-api.version}.jar"/>
+        <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
+        <include name="xmlenc-${xmlenc.version}.jar"/>
+        <include name="core-${core.vesion}.jar"/> 
+	    </lib>
+	    <classes dir="${source.conf.dir}" excludes="**/*.example **/*.sh"/>
+	    <classes dir="${proxy.conf.dir}" excludes="**/*.template **/*.sh"/>
+	    <classes dir="${build.classes}"/>
+	    <classes dir="${hadoop.root}/build/classes"/>
+			<classes dir="${test.build.dir}"/>
+	  </war>
+	</target>
+	
+	
+	<target name="cactifywar" depends="war,load-tasks">
+		<mkdir dir="${target.dir}" />
+    <cactifywar srcfile="${build.dir}/${final.name}.war"
+        destfile="${target.dir}/${cactus.warfile.name}.war"
+        mergewebxml="${src.test.resources}/cactus-web.xml">
+      <servletredirector/>
+      <servletredirector name="ServletRedirectorSecure"
+          mapping="/ServletRedirectorSecure" roles="test"/>
+      <filterredirector mapping="/test/filterRedirector.jsp"/>
+    </cactifywar>    	
+	</target>
+
+	<target name="test" depends="compile,compile-test,cactifywar" if="test.available">
+	  <echo>Please take a deep breath while Cargo gets the Tomcat for running the servlet tests...</echo>
+	  
+	  <mkdir dir="${tomcatconfig.dir}" />
+	  <mkdir dir="${tomcatconfig.dir}/conf" />
+	  <mkdir dir="${tomcatconfig.dir}/webapps" />
+	  <mkdir dir="${tomcatconfig.dir}/temp" />
+	  <mkdir dir="${logs.dir}" />
+	  <mkdir dir="${reports.dir}" />	  
+	  <copy file="${tomcat.conf.test}/server.xml" tofile="${tomcatconfig.dir}/conf/server.xml"/>
+		<copy file="${tomcat.conf.test}/web.xml" tofile="${tomcatconfig.dir}/conf/web.xml"/>
+		<copy file="${tomcat.conf.test}/tomcat-users.xml" tofile="${tomcatconfig.dir}/conf/tomcat-users.xml"/>
+        
+		<cactus warfile="${target.dir}/${cactus.warfile.name}.war" printsummary="yes" failureproperty="tests.failed">
+			<classpath>
+				<path refid="cactus.classpath"/>
+				<pathelement location="${build.classes}"/>
+				<pathelement location="${proxy.conf.dir}"/>
+				<pathelement location="${src.test.resources}"/>
+				<pathelement location="${src.test.resources}/proxy-config"/>
+			</classpath>		
+		
+			<containerset>
+				<cargo containerId="${tomcat.container.id}" output="${logs.dir}/output.log" log="${logs.dir}/cargo.log">
+				 <zipUrlInstaller
+            installUrl="http://apache.osuosl.org/tomcat/tomcat-6/v6.0.18/bin/apache-tomcat-6.0.18.zip"
+            installDir="${target.dir}/${tomcat.container.id}"/>
+				  <!--<configuration type="standalone" home="${tomcatconfig.dir}">-->
+				  <configuration type="existing" home="${tomcatconfig.dir}">
+						<property name="cargo.servlet.port" value="${cargo.servlet.port}"/>
+						<property name="cargo.logging" value="${cargo.logging}"/>
+						<property name="cactus.toDir" value="${build.test}"/>
+						<deployable type="war" file="${target.dir}/${cactus.warfile.name}.war"/>
+					</configuration>
+				</cargo>
+			</containerset>
+			<sysproperty key="javax.net.ssl.trustStore" value="${ssl.keystore.proxy}"/>
+			<sysproperty key="javax.net.ssl.trustStorePassword" value="changeme"/>
+			<sysproperty key="javax.net.ssl.keyStore.proxy" value="${ssl.keystore.proxy}"/>
+			<sysproperty key="javax.net.ssl.keyStore" value="${ssl.keystore.client}"/>
+			<sysproperty key="javax.net.ssl.keyStorePassword" value="changeme"/>
+			<sysproperty key="javax.net.ssl.keyPassword" value="changeme"/>
+			
+			<sysproperty key="javax.net.ssl.clientCert" value="${ssl.client.cert}"/>
+			
+			<sysproperty key="test.build.data" value="${build.test}/data"/>
+      <sysproperty key="build.test" value="${build.test}"/>
+      <sysproperty key="build.target" value="${target.dir}"/>
+      
+      <sysproperty key="test.proxy.conf.dir" value="${proxy.conf.test}"/>
+      <!-- requires fork=yes for: 
+        relative File paths to use the specified user.dir 
+        classpath to use build/contrib/*.jar
+      -->
+      <sysproperty key="user.dir" value="${build.test}/data"/>
+      
+      <sysproperty key="fs.default.name" value="${fs.default.name}"/>
+      <sysproperty key="hadoop.test.localoutputfile" value="${hadoop.test.localoutputfile}"/>
+      <sysproperty key="hadoop.log.dir" value="${hadoop.log.dir}"/> 
+      
+			
+			<formatter type="${cactus.formatter.type}"/>
+			<batchtest todir="${reports.dir}" unless="testcase">
+				<fileset dir="${src.test}">
+					<include name="**/Test*.java"/>
+				</fileset>
+			</batchtest>
+			<batchtest todir="${reports.dir}" if="testcase">
+        <fileset dir="${src.test}" includes="**/${testcase}.java"/>
+      </batchtest>
+		</cactus>
+		<junitreport todir="${reports.dir}">
+			<fileset dir="${target.dir}/${tomcat.container.id}" includes="TEST-*.xml"/>
+      <report todir="${reports.dir}" format="frames"/>
+    </junitreport>
+    <fail if="tests.failed">Tests failed!</fail>
+	</target>
 
 	<!-- ====================================================== -->
 	<!-- Macro definitions                                      -->
@@ -75,20 +242,21 @@
 		</copy>
 		<copy todir="${build.dir}/${final.name}/lib" includeEmptyDirs="false">
 			<fileset dir="${common.ivy.lib.dir}">
-        <include name="commons-logging-${commons-logging.version}"/>
+        <include name="commons-logging-${commons-logging.version}.jar"/>
         <include name="commons-logging-api-${commons-logging-api.version}.jar"/>
         <include name="junit-${junit.version}.jar"/>
         <include name="log4j-${log4j.version}.jar"/>
         <include name="slf4j-api-${slf4j-api.version}.jar"/>
-        <include name="slf4j-log4j${slf4j-log4j.version}.jar"/>
+        <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
         <include name="xmlenc-${xmlenc.version}.jar"/>
+        <include name="jetty-util-${jetty-util.version}.jar"/>
         <include name="jetty-${jetty.version}.jar"/>
-        <include name="servlet-api-${servlet-api-2.5.version}.jar"/>
-        <include name="core-${core.vesion}"/> 
+        <include name="servlet-api-2.5-${servlet-api-2.5.version}.jar"/>
+        <include name="core-${core.vesion}.jar"/> 
                        </fileset>
 		       <fileset dir="${hadoop.root}/lib/jsp-${jsp.version}">
-        <include name="jsp-${jsp.version}"/> 
-        <include name="jsp-api-${jsp-api.vesion}"/> 
+        <include name="jsp-${jsp.version}.jar"/> 
+        <include name="jsp-api-${jsp.version}.jar"/> 
 			</fileset>
 		</copy>
 
@@ -102,10 +270,12 @@
 		<copy todir="${build.dir}/${final.name}/bin">
 			<fileset dir="${bin.dir}"/>
 		</copy>
+	
 
 		<copy todir="${build.dir}/${final.name}/conf">
-			<fileset dir="${conf.dir}"/>
+			<fileset dir="${proxy.conf.dir}"/>
 		</copy>
+		
 
 		<copy todir="${build.dir}/${final.name}">
 			<fileset dir="${basedir}">
@@ -165,11 +335,13 @@
 		</macro_tar>
 	</target>
 
-  <!-- the unit test classpath -->
+
+	 <!-- the unit test classpath -->
   <path id="test.classpath">
-    <pathelement location="${build.test}" />
+  	<pathelement location="${proxy.conf.test}" />
+    <pathelement location="${test.build.dir}" />
     <pathelement location="${hadoop.root}/build/test/classes"/>
-    <pathelement location="${hadoop.root}/src/contrib/test"/>
+    <!--<pathelement location="${hadoop.root}/src/contrib/test"/>-->
     <pathelement location="${hadoop.root}/conf"/>
     <pathelement location="${hadoop.root}/build"/>
     <pathelement location="${hadoop.root}/build/classes"/>
@@ -177,6 +349,9 @@
     <pathelement location="${build.examples}"/>
     <path refid="contrib-classpath"/>
   </path>
-
+  
+  <path id="cactus.classpath">
+    <path refid="test.classpath"/>
+  </path>
 
 </project>

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml Sat Feb 28 01:58:20 2009
@@ -7,7 +7,7 @@
 
 <property>
   <name>hdfsproxy.https.address</name>
-  <value>0.0.0.0:50479</value>
+  <value>0.0.0.0:8443</value>
   <description>the SSL port that hdfsproxy listens on
   </description>
 </property>
@@ -21,7 +21,7 @@
 
 <property>
   <name>hdfsproxy.dfs.namenode.address</name>
-  <value></value>
+  <value>localhost:54321</value>
   <description>namenode address of the HDFS cluster being proxied
   </description>
 </property>

Added: hadoop/core/trunk/src/contrib/hdfsproxy/conf/ssl-server.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/conf/ssl-server.xml?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/conf/ssl-server.xml (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/conf/ssl-server.xml Sat Feb 28 01:58:20 2009
@@ -0,0 +1,48 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+
+<property>
+  <name>ssl.server.truststore.location</name>
+  <value>${javax.net.ssl.keyStore.proxy}</value>
+</property>
+
+<property>
+  <name>ssl.server.truststore.password</name>
+  <value>changeme</value>
+</property>
+
+<property>
+  <name>ssl.server.keystore.location</name>
+  <value>${javax.net.ssl.keyStore.proxy}</value>
+</property>
+
+<property>
+  <name>ssl.server.keystore.password</name>
+  <value>changeme</value>
+</property>
+
+<property>
+  <name>ssl.server.keystore.keypassword</name>
+  <value>changeme</value>
+</property>
+
+</configuration>

Added: hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml Sat Feb 28 01:58:20 2009
@@ -0,0 +1,158 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<!DOCTYPE web-app 
+    PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" 
+    "http://java.sun.com/dtd/web-app_2_3.dtd">
+
+<web-app>
+
+
+    <!-- General description of your web application -->
+
+    <display-name>HDFS Proxy</display-name>
+    <description>
+      get data from grid 
+    </description>
+
+
+    <!-- Context initialization parameters that define shared
+         String constants used within your application, which
+         can be customized by the system administrator who is
+         installing your application.  The values actually
+         assigned to these parameters can be retrieved in a
+         servlet or JSP page by calling:
+
+             String value =
+               getServletContext().getInitParameter("name");
+
+         where "name" matches the <param-name> element of
+         one of these initialization parameters.
+
+         You can define any number of context initialization
+         parameters, including zero.
+    -->
+
+    <context-param>
+      <param-name>webmaster</param-name>
+      <param-value>zhiyong1@yahoo-inc.com</param-value>
+      <description>
+        The EMAIL address of the administrator to whom questions
+        and comments about this application should be addressed.
+      </description>
+    </context-param>
+    
+    <filter>
+	   	<filter-name>proxyFilter</filter-name>
+	   	<filter-class>org.apache.hadoop.hdfsproxy.ProxyFilter</filter-class>
+	   	<init-param>
+	      <param-name>filteraddress</param-name>
+	      <param-value>10</param-value>
+	   	</init-param>
+		</filter>
+
+		<filter-mapping>
+        <filter-name>proxyFilter</filter-name>
+				<url-pattern>/*</url-pattern>
+    </filter-mapping>
+    	
+
+
+    <!-- Servlet definitions for the servlets that make up
+         your web application, including initialization
+         parameters.  With Tomcat, you can also send requests
+         to servlets not listed here with a request like this:
+
+           http://localhost:8080/{context-path}/servlet/{classname}
+
+         but this usage is not guaranteed to be portable.  It also
+         makes relative references to images and other resources
+         required by your servlet more complicated, so defining
+         all of your servlets (and defining a mapping to them with
+         a servlet-mapping element) is recommended.
+
+         Servlet initialization parameters can be retrieved in a
+         servlet or JSP page by calling:
+
+             String value =
+               getServletConfig().getInitParameter("name");
+
+         where "name" matches the <param-name> element of
+         one of these initialization parameters.
+
+         You can define any number of servlets, including zero.
+    -->
+
+    
+    <servlet>
+    	<servlet-name>listPaths</servlet-name>
+      <description>list paths data access</description>
+      <servlet-class>org.apache.hadoop.hdfsproxy.ProxyListPathsServlet</servlet-class>
+    </servlet>
+    
+    <servlet-mapping>
+        <servlet-name>listPaths</servlet-name>
+        <url-pattern>/listPaths/*</url-pattern>
+    </servlet-mapping>
+
+		<servlet>
+    	<servlet-name>data</servlet-name>
+      <description>data access</description>
+      <servlet-class>org.apache.hadoop.hdfsproxy.ProxyFileDataServlet</servlet-class>
+    </servlet>
+    
+	  <servlet-mapping>
+        <servlet-name>data</servlet-name>
+        <url-pattern>/data/*</url-pattern>
+    </servlet-mapping>
+    
+    <servlet>
+    	<servlet-name>streamFile</servlet-name>
+      <description>stream file access</description>
+      <servlet-class>org.apache.hadoop.hdfsproxy.ProxyStreamFile</servlet-class>
+    </servlet>
+    
+    <servlet-mapping>
+        <servlet-name>streamFile</servlet-name>
+        <url-pattern>/streamFile/*</url-pattern>
+    </servlet-mapping>
+    
+
+		<welcome-file-list>
+		  <welcome-file>index.html</welcome-file>
+		</welcome-file-list>
+
+    <!-- Define the default session timeout for your application,
+         in minutes.  From a servlet or JSP page, you can modify
+         the timeout for a particular session dynamically by using
+         HttpSession.getMaxInactiveInterval(). -->
+
+    <session-config>
+      <session-timeout>30</session-timeout>    <!-- 30 minutes -->
+    </session-config>    
+
+
+</web-app>
+
+
+
+
+
+
+
+
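With the war deployed as ROOT.war and clientAuth="true" on the Tomcat
connector, the servlets mapped above can be exercised by any HTTPS client that
presents a certificate; a curl sketch with illustrative host and file names:

    # list a directory through the proxy
    curl --cert client.crt --key client.key --cacert proxy.crt \
         "https://proxy.example.com:8443/listPaths/user/nobody"
    # stream a file's contents through the streamFile servlet
    curl --cert client.crt --key client.key --cacert proxy.crt \
         "https://proxy.example.com:8443/streamFile/user/nobody/part-00000" -o part-00000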

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-certs.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-certs.xml?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-certs.xml (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-certs.xml Sat Feb 28 01:58:20 2009
@@ -6,21 +6,27 @@
 This file defines the mappings from username to comma seperated list
 of certificate serial numbers that the user is allowed to use. One mapping
 per user. Wildcard characters, such as "*" and "?", are not recognized. 
-Any leading or trailing whitespaces are stripped/ignored. Note that user
-"Admin" is the special hdfsproxy admin user. To make a user an admin, add 
-the user's certificate serial number to user "Admin". Normal users cannot 
-have "Admin" as username. Usernames can only comprise of 0-9a-zA-Z and
-underscore.
+Any leading or trailing whitespaces are stripped/ignored.
 
 -->
 
 <configuration>
+<property>
+  <name> nobody </name>
+  <value> ,6  ,,  3 , 9a2cf0be9ddf8280
+
+
+
+         </value>
+</property>
 
 <property>
-  <name>Admin</name>
-  <value></value>
-  <description> Special hdfsproxy admin user
-  </description>
+  <name> Admin </name>
+  <value>, 6,  ,,  3 , 9a2cf0be9ddf8280
+
+
+
+         </value>
 </property>
 
 </configuration>

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-permissions.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-permissions.xml?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-permissions.xml (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/conf/user-permissions.xml Sat Feb 28 01:58:20 2009
@@ -3,26 +3,24 @@
 
 <!-- 
 
-This file defines the mappings from username to comma seperated list
-of directories/files that the user is allowed to use. One mapping
+This file defines the mappings from user name to a comma-separated list
+of directories/files that the user is allowed to access. One mapping
 per user. Wildcard characters, such as "*" and "?", are not recognized. 
 For example, to match "/output" directory, one can use "/output" or 
-"/output/", but not "/output/*". Any leading or trailing whitespaces 
-in the name field are stripped/ignored, while only leading whitespaces 
-in the value field are. Note that the special hdfsproxy admin user "Admin"
-doesn't automatically have access to any files, unless explicitly 
-specified in this file. Usernames can only comprise of 0-9a-zA-Z and 
-underscore.
+"/output/", but not "/output/*". Note that any leading or trailing
+whitespaces are stripped/ignored for the name field.
 
 -->
 
 <configuration>
-
 <property>
-  <name></name>
-  <value></value>
-  <description>
-  </description>
+  <name> nobody </name>
+  <value> ,
+
+
+
+        /input, /user, /data </value>
 </property>
 
+
 </configuration>

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/ivy.xml?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/ivy.xml (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/ivy.xml Sat Feb 28 01:58:20 2009
@@ -22,18 +22,10 @@
     <artifact conf="master"/>
   </publications>
   <dependencies>
-    <dependency org="commons-logging"
-      name="commons-logging"
-      rev="${commons-logging.version}"
-      conf="common->default"/>
     <dependency org="log4j"
       name="log4j"
       rev="${log4j.version}"
       conf="common->master"/>
-    <dependency org="org.mortbay.jetty"
-      name="servlet-api-2.5"
-      rev="${servlet-api-2.5.version}"
-      conf="common->default"/>
     <dependency org="commons-logging"
       name="commons-logging"
       rev="${commons-logging.version}"
@@ -41,15 +33,15 @@
     <dependency org="commons-logging"
       name="commons-logging-api"
       rev="${commons-logging-api.version}"
-      conf="common->default"/>
+      conf="common->master"/>
     <dependency org="junit"
       name="junit"
       rev="${junit.version}"
-      conf="common->default"/>
+      conf="common->master"/>
     <dependency org="org.slf4j"
       name="slf4j-api"
       rev="${slf4j-api.version}"
-      conf="common->default"/>
+      conf="common->master"/>
     <dependency org="org.slf4j"
       name="slf4j-log4j12"
       rev="${slf4j-log4j12.version}"
@@ -57,18 +49,42 @@
     <dependency org="xmlenc"
       name="xmlenc"
       rev="${xmlenc.version}"
-      conf="common->default"/>
+      conf="common->master"/>
     <dependency org="org.mortbay.jetty"
       name="jetty"
       rev="${jetty.version}"
-      conf="common->default"/>
+      conf="common->master"/>
+    <dependency org="org.mortbay.jetty"
+      name="jetty-util"
+      rev="${jetty-util.version}"
+      conf="common->master"/>
     <dependency org="org.mortbay.jetty"
       name="servlet-api-2.5"
       rev="${servlet-api-2.5.version}"
-      conf="common->default"/>
+      conf="common->master"/>
     <dependency org="org.eclipse.jdt"
       name="core"
       rev="${core.version}"
-      conf="common->default"/>
+      conf="common->master"/>
+    <dependency org="org.apache.cactus" name="cactus.core.framework.uberjar.javaEE.14" rev="${cactus.version}" conf="common->master"/>
+		<dependency org="org.apache.cactus" name="cactus.integration.ant" rev="${cactus.version}" conf="common->master"/>
+		<dependency org="org.apache.cactus" name="cactus.integration.shared.api" rev="${cactus.version}" conf="common->master"/>
+				
+		<dependency org="commons-httpclient" name="commons-httpclient" rev="3.1" conf="common->master"/>
+		<dependency org="commons-io" name="commons-io" rev="1.4" conf="common->master"/>
+		<dependency org="commons-lang" name="commons-lang" rev="2.3" conf="common->master"/>
+		<dependency org="commons-codec" name="commons-codec" rev="1.3" conf="common->master"/>
+		
+		<dependency org="aspectj" name="aspectjrt" rev="1.5.3" conf="common->master"/>
+		
+		<dependency org="org.codehaus.cargo" name="cargo-core-uberjar" rev="0.9" conf="common->master"/>
+		<dependency org="org.codehaus.cargo" name="cargo-ant" rev="0.9" conf="common->master"/>
+
+		<dependency org="javax.servlet" name="jsp-api" rev="2.0" conf="common->master"/>
+		<dependency org="javax.servlet" name="servlet-api" rev="2.5" conf="common->master"/>
+		<dependency org="javax.servlet" name="jstl" rev="1.1.2" conf="common->master"/>
+		<dependency org="taglibs" name="standard" rev="1.1.2" conf="common->master"/>
+	  
+		<dependency org="junitperf" name="junitperf" rev="1.8" conf="common->master"/>
   </dependencies>
 </ivy-module>

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/ivy/libraries.properties?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/ivy/libraries.properties (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/ivy/libraries.properties Sat Feb 28 01:58:20 2009
@@ -1,5 +1,18 @@
 #This properties file lists the versions of the various artifacts used by hadoop.
 #It drives ivy and the generation of a maven POM
 #These are the versions of our dependencies (in alphabetical order)
+ivy.version=2.0.0-rc2
+
 log4j.version=1.2.15
 slf4j-api.version=1.4.3
+slf4j-log4j12.version=1.4.3
+jetty.version=6.1.14
+jetty-util.version=6.1.14
+servlet-api-2.5.version=6.1.14
+cactus.version=1.8.0
+commons-logging.version=1.1
+commons-logging-api.version=1.0.4
+junit.version=3.8.2
+jsp.version=2.1
+core.version=3.1.1
+xmlenc.version=0.52
\ No newline at end of file

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java Sat Feb 28 01:58:20 2009
@@ -23,18 +23,11 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.util.Set;
-
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.SSLSession;
-import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.util.HostsFileReader;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -123,87 +116,6 @@
     }
   }
   
-  private static enum StartupOption {
-    RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), REGULAR("-regular");
-
-    private String name = null;
-
-    private StartupOption(String arg) {
-      this.name = arg;
-    }
-
-    public String getName() {
-      return name;
-    }
-  }
-
-  private static void printUsage() {
-    System.err.println("Usage: hdfsproxy ["
-        + StartupOption.RELOAD.getName() + "] | ["
-        + StartupOption.CLEAR.getName() + "]");
-  }
-
-  private static StartupOption parseArguments(String args[]) {
-    int argsLen = (args == null) ? 0 : args.length;
-    StartupOption startOpt = StartupOption.REGULAR;
-    for (int i = 0; i < argsLen; i++) {
-      String cmd = args[i];
-      if (StartupOption.RELOAD.getName().equalsIgnoreCase(cmd)) {
-        startOpt = StartupOption.RELOAD;
-      } else if (StartupOption.CLEAR.getName().equalsIgnoreCase(cmd)) {
-        startOpt = StartupOption.CLEAR;
-      } else if (StartupOption.REGULAR.getName().equalsIgnoreCase(cmd)) {
-        startOpt = StartupOption.REGULAR;
-      } else
-        return null;
-    }
-    return startOpt;
-  }
-
-  /**
-   * Dummy hostname verifier that is used to bypass hostname checking
-   */
-  private static class DummyHostnameVerifier implements HostnameVerifier {
-    public boolean verify(String hostname, SSLSession session) {
-      return true;
-    }
-  }
-
-  private static HttpsURLConnection openConnection(String hostname, int port,
-      String path) throws IOException {
-    try {
-      final URL url = new URI("https", null, hostname, port, path, null, null)
-          .toURL();
-      HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
-      // bypass hostname verification
-      conn.setHostnameVerifier(new DummyHostnameVerifier());
-      conn.setRequestMethod("GET");
-      return conn;
-    } catch (URISyntaxException e) {
-      throw (IOException) new IOException().initCause(e);
-    }
-  }
-
-  private static void setupSslProps(Configuration conf) {
-    Configuration sslConf = new Configuration(false);
-    sslConf.addResource(conf.get("hdfsproxy.https.server.keystore.resource",
-        "ssl-server.xml"));
-    System.setProperty("javax.net.ssl.trustStore", sslConf
-        .get("ssl.server.truststore.location"));
-    System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
-        "ssl.server.truststore.password", ""));
-    System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
-        "ssl.server.truststore.type", "jks"));
-    System.setProperty("javax.net.ssl.keyStore", sslConf
-        .get("ssl.server.keystore.location"));
-    System.setProperty("javax.net.ssl.keyStorePassword", sslConf.get(
-        "ssl.server.keystore.password", ""));
-    System.setProperty("javax.net.ssl.keyPassword", sslConf.get(
-        "ssl.server.keystore.keypassword", ""));
-    System.setProperty("javax.net.ssl.keyStoreType", sslConf.get(
-        "ssl.server.keystore.type", "jks"));
-  }
-
   static InetSocketAddress getSslAddr(Configuration conf) throws IOException {
     String addr = conf.get("hdfsproxy.https.address");
     if (addr == null)
@@ -211,66 +123,21 @@
     return NetUtils.createSocketAddr(addr);
   }
 
-  private static boolean sendCommand(Configuration conf, String path)
-      throws IOException {
-    setupSslProps(conf);
-    int sslPort = getSslAddr(conf).getPort();
-    int err = 0;
-    StringBuilder b = new StringBuilder();
-    HostsFileReader hostsReader = new HostsFileReader(conf.get("hdfsproxy.hosts",
-        "hdfsproxy-hosts"), "");
-    Set<String> hostsList = hostsReader.getHosts();
-    for (String hostname : hostsList) {
-      HttpsURLConnection connection = null;
-      try {
-        connection = openConnection(hostname, sslPort, path);
-        connection.connect();
-        if (connection.getResponseCode() != HttpServletResponse.SC_OK) {
-          b.append("\n\t" + hostname + ": " + connection.getResponseCode()
-              + " " + connection.getResponseMessage());
-          err++;
-        }
-      } catch (IOException e) {
-        b.append("\n\t" + hostname + ": " + e.getLocalizedMessage());
-        err++;
-      } finally {
-        if (connection != null)
-          connection.disconnect();
-      }
-    }
-    if (err > 0) {
-      System.err.print("Command failed on the following "
-          + err + " host" + (err==1?":":"s:") + b.toString() + "\n");
-      return true;
-    }
-    return false;
-  }
+ 
 
   public static HdfsProxy createHdfsProxy(String argv[], Configuration conf)
       throws IOException {
+    if (argv.length > 0) {
+      System.err.println("Usage: HdfsProxy");
+      return null;
+    }
     if (conf == null) {
       conf = new Configuration(false);
       conf.addResource("hdfsproxy-default.xml");
     }
-    StartupOption startOpt = parseArguments(argv);
-    if (startOpt == null) {
-      printUsage();
-      return null;
-    }
-
-    switch (startOpt) {
-    case RELOAD:
-      boolean error = sendCommand(conf, "/reloadPermFiles");
-      System.exit(error ? 1 : 0);
-    case CLEAR:
-      error = sendCommand(conf, "/clearUgiCache");
-      System.exit(error ? 1 : 0);
-    default:
-    }
-
+   
     StringUtils.startupShutdownMessage(HdfsProxy.class, argv, LOG);
     HdfsProxy proxy = new HdfsProxy(conf);
-    //proxy.addSslListener(conf);
     proxy.start();
     return proxy;
   }
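
A minimal sketch, not part of this commit: with the -reloadPermFiles and
-clearUgiCache options moved out to the new ProxyUtil class below, the only
valid invocation of createHdfsProxy is with an empty argument list.
"StartProxy" is a hypothetical wrapper class.

    import org.apache.hadoop.hdfsproxy.HdfsProxy;

    public class StartProxy {
      public static void main(String[] args) throws Exception {
        // Passing a null Configuration makes createHdfsProxy load
        // hdfsproxy-default.xml itself.
        HdfsProxy proxy = HdfsProxy.createHdfsProxy(new String[] {}, null);
        if (proxy == null) {
          // createHdfsProxy prints a usage message and returns null
          // when any argument is supplied.
          System.exit(1);
        }
      }
    }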

Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java?rev=748770&r1=748769&r2=748770&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java Sat Feb 28 01:58:20 2009
@@ -17,8 +17,11 @@
  */
 package org.apache.hadoop.hdfsproxy;
 
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.math.BigInteger;
+import java.security.cert.CertificateFactory;
 import java.security.cert.X509Certificate;
 import java.security.cert.CertificateExpiredException;
 import java.security.cert.CertificateNotYetValidException;
@@ -28,6 +31,7 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
+import java.net.InetSocketAddress;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -37,12 +41,14 @@
 import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
+import javax.servlet.ServletContext;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.net.NetUtils;
 
 public class ProxyFilter implements Filter {
   public static final Log LOG = LogFactory.getLog(ProxyFilter.class);
@@ -74,9 +80,23 @@
     Map<String, Set<BigInteger>> cMap = getCertsMap(conf);
     certsMap = cMap != null ? cMap : new HashMap<String, Set<BigInteger>>();
   }
+  
 
   /** {@inheritDoc} */
   public void init(FilterConfig filterConfig) throws ServletException {
+    ServletContext context = filterConfig.getServletContext();
+    Configuration conf = new Configuration(false);
+    conf.addResource("hdfsproxy-default.xml");
+    conf.addResource("ssl-server.xml");
+    String nn = conf.get("hdfsproxy.dfs.namenode.address");
+    if (nn == null) {
+      throw new ServletException("Proxy source cluster name node address not specified");
+    }
+    InetSocketAddress nAddr = NetUtils.createSocketAddr(nn);
+    context.setAttribute("name.node.address", nAddr);
+    context.setAttribute("name.conf", new Configuration());
+           
+    LOG.info("proxyFilter initialization success: " + nn);
   }
 
   private static Map<String, Set<Path>> getPermMap(Configuration conf) {
@@ -136,6 +156,8 @@
   /** {@inheritDoc} */
   public void destroy() {
   }
+  
+  
 
   /** {@inheritDoc} */
   public void doFilter(ServletRequest request, ServletResponse response,
@@ -177,15 +199,33 @@
 
       LOG.debug(b.toString());
     }
-
-    if (rqst.getScheme().equalsIgnoreCase("https")) {
+    
+    boolean unitTest = rqst.getScheme().equalsIgnoreCase("http")
+        && rqst.getParameter("UnitTest") != null;
+    
+    if (rqst.getScheme().equalsIgnoreCase("https") || unitTest) {
       boolean isAuthorized = false;
-      X509Certificate[] certs = (X509Certificate[]) rqst
-          .getAttribute("javax.servlet.request.X509Certificate");
+      X509Certificate[] certs = (X509Certificate[]) rqst.getAttribute("javax.servlet.request.X509Certificate");
+      
+      if (unitTest) {
+        try {
+          LOG.debug("==> Entering https unit test");
+          String sslPath = rqst.getParameter("SslPath");
+          InputStream inStream = new FileInputStream(sslPath);
+          CertificateFactory cf = CertificateFactory.getInstance("X.509");
+          X509Certificate cert = (X509Certificate)cf.generateCertificate(inStream);
+          inStream.close();          
+          certs = new X509Certificate[] {cert};
+        } catch (Exception e) {
+          LOG.info("Failed to load the test client certificate", e);
+        }
+      } 
+      
       if (certs == null || certs.length == 0) {
         rsp.sendError(HttpServletResponse.SC_BAD_REQUEST,
-            "No client SSL certificate received");
-        return;
+          "No client SSL certificate received");
+        LOG.info("No client SSL certificate received");
+        return;       
       }
       for (X509Certificate cert : certs) {
         try {
@@ -205,7 +245,7 @@
           return;
         }
       }
-
+      
       String[] tokens = certs[0].getSubjectX500Principal().getName().split(
           "\\s*,\\s*");
       String userID = null;
@@ -222,8 +262,10 @@
         return;
       }
       userID = userID.substring(3);
-
+      
       String servletPath = rqst.getServletPath();
+      if (unitTest) servletPath = rqst.getParameter("TestServletPathInfo");
+      
       if (HFTP_PATTERN.matcher(servletPath).matches()) {
         // request is an HSFTP request
         if (FILEPATH_PATTERN.matcher(servletPath).matches()) {
@@ -258,12 +300,13 @@
         LOG.info("Ugi cache cleared");
         rsp.setStatus(HttpServletResponse.SC_OK);
         return;
-      }
+      } 
 
       if (!isAuthorized) {
         rsp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized access");
         return;
       }
+      
       // request is authorized, set ugi for servlets
       UnixUserGroupInformation ugi = ProxyUgiManager
           .getUgiForUser(userID);
@@ -274,12 +317,13 @@
         return;
       }
       rqst.setAttribute("authorized.ugi", ugi);
-    } else { // http request, set ugi for servlets, only for testing purposes
+    } else if(rqst.getScheme().equalsIgnoreCase("http")) { // http request, set ugi for servlets, only for testing purposes
       String ugi = rqst.getParameter("ugi");
-      rqst.setAttribute("authorized.ugi", new UnixUserGroupInformation(ugi
+      if (ugi != null) {
+        rqst.setAttribute("authorized.ugi", new UnixUserGroupInformation(ugi
           .split(",")));
+      } 
     }
-
     chain.doFilter(request, response);
   }
 
@@ -314,7 +358,7 @@
       LOG.info("Can't get file path from HTTPS request; user is " + userID);
       return false;
     }
-
+    
     Path userPath = new Path(pathInfo);
     while (userPath != null) {
       if (LOG.isDebugEnabled()) {
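
A minimal sketch, not part of this commit, of the certificate-injection path
added above: the UnitTest branch boils down to standard JCA calls that load an
X.509 certificate from a file. The test.crt path below is the test certificate
referenced elsewhere in this commit.

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.security.cert.CertificateFactory;
    import java.security.cert.X509Certificate;

    public class LoadCert {
      public static void main(String[] args) throws Exception {
        CertificateFactory cf = CertificateFactory.getInstance("X.509");
        InputStream in = new FileInputStream("./src/test/resources/ssl-keys/test.crt");
        try {
          X509Certificate cert = (X509Certificate) cf.generateCertificate(in);
          // ProxyFilter.doFilter() extracts the user ID from the CN
          // attribute of this subject DN.
          System.out.println(cert.getSubjectX500Principal().getName());
        } finally {
          in.close();
        }
      }
    }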

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java Sat Feb 28 01:58:20 2009
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.security.cert.X509Certificate;
+import java.util.Set;
+
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLSession;
+import javax.net.ssl.SSLSocketFactory;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.util.HostsFileReader;
+
+
+/**
+ * Proxy utility.
+ */
+public class ProxyUtil {
+  public static final Log LOG = LogFactory.getLog(ProxyUtil.class);
+  
+  private static enum UtilityOption {
+    RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache");
+
+    private String name = null;
+
+    private UtilityOption(String arg) {
+      this.name = arg;
+    }
+
+    public String getName() {
+      return name;
+    }
+  }
+  
+  /**
+   * Dummy hostname verifier that is used to bypass hostname checking
+   */
+  private static class DummyHostnameVerifier implements HostnameVerifier {
+    public boolean verify(String hostname, SSLSession session) {
+      return true;
+    }
+  }
+
+  private static HttpsURLConnection openConnection(String hostname, int port,
+      String path) throws IOException {
+    try {
+      final URL url = new URI("https", null, hostname, port, path, null, null)
+          .toURL();
+      HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
+      // bypass hostname verification
+      conn.setHostnameVerifier(new DummyHostnameVerifier());
+      conn.setRequestMethod("GET");
+      return conn;
+    } catch (URISyntaxException e) {
+      throw (IOException) new IOException().initCause(e);
+    }
+  }
+
+  private static void setupSslProps(Configuration conf) {
+    System.setProperty("javax.net.ssl.trustStore", conf
+        .get("ssl.client.truststore.location"));
+    System.setProperty("javax.net.ssl.trustStorePassword", conf.get(
+        "ssl.client.truststore.password", ""));
+    System.setProperty("javax.net.ssl.trustStoreType", conf.get(
+        "ssl.client.truststore.type", "jks"));
+    System.setProperty("javax.net.ssl.keyStore", conf
+        .get("ssl.client.keystore.location"));
+    System.setProperty("javax.net.ssl.keyStorePassword", conf.get(
+        "ssl.client.keystore.password", ""));
+    System.setProperty("javax.net.ssl.keyPassword", conf.get(
+        "ssl.client.keystore.keypassword", ""));
+    System.setProperty("javax.net.ssl.keyStoreType", conf.get(
+        "ssl.client.keystore.type", "jks"));
+  }
+
+  static InetSocketAddress getSslAddr(Configuration conf) throws IOException {
+    String addr = conf.get("hdfsproxy.https.address");
+    if (addr == null)
+      throw new IOException("HdfsProxy address is not specified");
+    return NetUtils.createSocketAddr(addr);
+  }
+
+  static boolean sendCommand(Configuration conf, String path)
+      throws IOException {
+    setupSslProps(conf);
+    int sslPort = getSslAddr(conf).getPort();
+    int err = 0;
+    StringBuilder b = new StringBuilder();
+
+    HostsFileReader hostsReader = new HostsFileReader(conf.get("hdfsproxy.hosts",
+        "hdfsproxy-hosts"), "");
+    Set<String> hostsList = hostsReader.getHosts();
+    for (String hostname : hostsList) {
+      HttpsURLConnection connection = null;
+      try {
+        connection = openConnection(hostname, sslPort, path);  
+        connection.connect(); 
+        if (LOG.isDebugEnabled()) {
+          StringBuffer sb = new StringBuffer();
+          X509Certificate[] clientCerts = (X509Certificate[]) connection.getLocalCertificates();
+          if (clientCerts != null) {
+            for (X509Certificate cert : clientCerts)
+              sb.append("\n Client certificate Subject Name is "
+                  + cert.getSubjectX500Principal().getName());
+          } else {
+            sb.append("\n No client certs were found");
+          }
+          X509Certificate[] serverCerts = (X509Certificate[]) connection.getServerCertificates();
+          if (serverCerts != null) {
+            for (X509Certificate cert : serverCerts)
+              sb.append("\n Server certificate Subject Name is "
+                  + cert.getSubjectX500Principal().getName());
+          } else {
+            sb.append("\n No server certs were found");
+          }
+          LOG.debug(sb.toString());
+        }
+        if (connection.getResponseCode() != HttpServletResponse.SC_OK) {
+          b.append("\n\t" + hostname + ": " + connection.getResponseCode()
+              + " " + connection.getResponseMessage());
+          err++;
+        }
+      } catch (IOException e) {
+        b.append("\n\t" + hostname + ": " + e.getLocalizedMessage());
+        LOG.debug("Command failed on host " + hostname, e);
+        err++;
+      } finally {
+        if (connection != null)
+          connection.disconnect();
+      }
+    }
+    if (err > 0) {
+      System.err.print("Command failed on the following "
+          + err + " host" + (err==1?":":"s:") + b.toString() + "\n");
+      return false;
+    }
+    return true;
+  }
+
+  public static void main(String[] args) throws Exception {
+    if(args.length != 1 || 
+        (!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0]) 
+            && !UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0]))) {
+      System.err.println("Usage: ProxyUtil ["
+          + UtilityOption.RELOAD.getName() + " | "
+          + UtilityOption.CLEAR.getName() + "]");
+      System.exit(-1);
+    }
+    Configuration conf = new Configuration(false);   
+    conf.addResource("ssl-client.xml");
+    conf.addResource("hdfsproxy-default.xml");
+     
+    if (UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])) {
+      // reload user-certs.xml and user-permissions.xml files
+      System.exit(sendCommand(conf, "/reloadPermFiles") ? 0 : 1);
+    } else {
+      // clear UGI caches
+      System.exit(sendCommand(conf, "/clearUgiCache") ? 0 : 1);
+    }
+  }
+        
+}
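
A minimal sketch, not part of this commit, of driving the utility
programmatically; the command-line equivalent is "ProxyUtil -reloadPermFiles".
sendCommand() is package-private, hence the package declaration; "ReloadPerms"
is a hypothetical class name.

    package org.apache.hadoop.hdfsproxy;

    import org.apache.hadoop.conf.Configuration;

    public class ReloadPerms {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(false);
        conf.addResource("ssl-client.xml");        // keystore/truststore for the SSL handshake
        conf.addResource("hdfsproxy-default.xml"); // hdfsproxy.https.address, hdfsproxy.hosts
        // true only if every host listed in hdfsproxy-hosts answered 200 OK
        boolean ok = ProxyUtil.sendCommand(conf, "/reloadPermFiles");
        System.exit(ok ? 0 : 1);
      }
    }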

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyFilter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyFilter.java?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyFilter.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyFilter.java Sat Feb 28 01:58:20 2009
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.ServletContext;
+
+import org.apache.cactus.FilterTestCase;
+import org.apache.cactus.WebRequest;
+import org.apache.cactus.WebResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+
+public class TestProxyFilter extends FilterTestCase {
+  
+  public static final Log LOG = LogFactory.getLog(TestProxyFilter.class);
+  
+  private static String TEST_CLIENT_SSL_CERT = System.getProperty(
+      "javax.net.ssl.clientCert", "./src/test/resources/ssl-keys/test.crt");
+  
+  private class DummyFilterChain implements FilterChain {
+    public void doFilter(ServletRequest theRequest, ServletResponse theResponse) 
+      throws IOException, ServletException  {
+      PrintWriter writer = theResponse.getWriter();
+  
+      writer.print("<p>some content</p>");
+      writer.close();
+    }
+  
+    public void init(FilterConfig theConfig) {
+    }
+  
+    public void destroy() {
+    }
+  }
+   
+  public void beginDoFilterHttp(WebRequest theRequest) {
+    theRequest.addParameter("ugi", "nobody,test");
+  }  
+  
+  public void testDoFilterHttp() throws ServletException, IOException  {    
+    ProxyFilter filter = new ProxyFilter();
+    
+    ServletContext context = config.getServletContext();
+    context.removeAttribute("name.node.address");
+    context.removeAttribute("name.conf");
+    assertNull(context.getAttribute("name.node.address"));
+    assertNull(context.getAttribute("name.conf"));
+    
+    filter.init(config);
+    
+    assertNotNull(context.getAttribute("name.node.address"));
+    assertNotNull(context.getAttribute("name.conf"));
+    
+    request.removeAttribute("authorized.ugi");
+    assertNull(request.getAttribute("authorized.ugi"));
+        
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);    
+    assertEquals("nobody,test", request.getAttribute("authorized.ugi").toString());
+    
+  }
+
+  public void endDoFilterHttp(WebResponse theResponse)  {
+    assertEquals("<p>some content</p>", theResponse.getText());    
+  }
+  
+  public void beginDoFilterHttps(WebRequest theRequest) throws Exception{
+    theRequest.addParameter("UnitTest", "true");
+    theRequest.addParameter("SslPath", TEST_CLIENT_SSL_CERT);
+    theRequest.addParameter("ugi", "nobody,test");    
+    theRequest.addParameter("TestServletPathInfo", "/streamFile");
+    theRequest.addParameter("filename", "/user");
+  }  
+  
+  public void testDoFilterHttps() throws Exception  {    
+    ProxyFilter filter = new ProxyFilter();
+    
+    request.removeAttribute("authorized.ugi");
+    assertNull(request.getAttribute("authorized.ugi"));        
+    
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.init(config);
+    filter.doFilter(request, response, mockFilterChain);
+    
+    LOG.info("Finished setting up X509Certificate");  
+    assertEquals("nobody", request.getAttribute("authorized.ugi").toString().substring(0, 6));
+    
+  }
+
+  public void endDoFilterHttps(WebResponse theResponse)  {
+    assertEquals("<p>some content</p>", theResponse.getText());    
+  }
+  
+    
+}
+
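
A minimal sketch, not part of this commit, of the Cactus convention the test
above relies on: for a test method testXxx, beginXxx(WebRequest) runs in the
client JVM to shape the request, testXxx runs inside the servlet container
with the implicit request/response/config members, and endXxx(WebResponse)
runs back in the client to check the output.

    import org.apache.cactus.FilterTestCase;
    import org.apache.cactus.WebRequest;
    import org.apache.cactus.WebResponse;

    public class SkeletonFilterTest extends FilterTestCase {
      public void beginSomething(WebRequest theRequest) {
        theRequest.addParameter("ugi", "nobody,test");   // client side
      }
      public void testSomething() throws Exception {
        // server side: request, response and config are supplied by Cactus
      }
      public void endSomething(WebResponse theResponse) {
        assertEquals("<p>some content</p>", theResponse.getText()); // client side
      }
    }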

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java Sat Feb 28 01:58:20 2009
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+
+/** Unit tests for ProxyUtil */
+public class TestProxyUtil extends TestCase {
+  
+  private static String TEST_PROXY_CONF_DIR = System.getProperty("test.proxy.conf.dir", "./conf");
+
+  public void testSendCommand() throws Exception {
+      
+    Configuration conf = new Configuration(false);  
+    conf.addResource("ssl-client.xml");
+    conf.addResource("hdfsproxy-default.xml");
+    conf.set("hdfsproxy.https.address", "localhost:8443");
+    String hostFname = TEST_PROXY_CONF_DIR + "/hdfsproxy-hosts";
+    conf.set("hdfsproxy.hosts", hostFname);    
+    
+    assertTrue(ProxyUtil.sendCommand(conf, "/test/reloadPermFiles"));
+    assertTrue(ProxyUtil.sendCommand(conf, "/test/clearUgiCache"));    
+    
+    conf.set("hdfsproxy.https.address", "localhost:0");
+    assertFalse(ProxyUtil.sendCommand(conf, "/test/reloadPermFiles"));
+  }
+ 
+}

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml Sat Feb 28 01:58:20 2009
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!DOCTYPE web-app
+    PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
+    "http://java.sun.com/dtd/web-app_2_3.dtd">
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<web-app>
+
+    <context-param>
+      <param-name>param</param-name>
+      <param-value>value used for testing</param-value>
+    </context-param>
+    
+    <servlet>
+        <servlet-name>ServletRedirector</servlet-name>
+        <servlet-class>org.apache.cactus.server.ServletTestRedirector</servlet-class>
+        <init-param>
+          <param-name>param1</param-name>
+          <param-value>value1 used for testing</param-value>
+        </init-param>
+    </servlet>
+    
+    <servlet>
+        <servlet-name>ServletRedirector_TestOverride</servlet-name>
+        <servlet-class>org.apache.cactus.server.ServletTestRedirector</servlet-class>
+        <init-param>
+          <param-name>param2</param-name>
+          <param-value>value2 used for testing</param-value>
+        </init-param>
+    </servlet>
+
+    <servlet>
+        <servlet-name>TestJsp</servlet-name>
+        <jsp-file>/test/test.jsp</jsp-file>
+    </servlet>
+
+    <servlet>
+        <servlet-name>JspRedirector</servlet-name>
+        <jsp-file>/jspRedirector.jsp</jsp-file>
+        <init-param>
+          <param-name>param1</param-name>
+          <param-value>value1 used for testing</param-value>
+        </init-param>
+    </servlet>
+
+    <servlet-mapping>
+        <servlet-name>ServletRedirector_TestOverride</servlet-name>
+        <url-pattern>/ServletRedirectorOverride</url-pattern>
+    </servlet-mapping>
+
+</web-app>

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml Sat Feb 28 01:58:20 2009
@@ -0,0 +1,74 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<!-- Put hdfsproxy specific properties in this file. -->
+
+<configuration>
+
+<property>
+  <name>hdfsproxy.https.address</name>
+  <value>0.0.0.0:8443</value>
+  <description>the SSL (HTTPS) address and port that hdfsproxy listens on
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.hosts</name>
+  <value>hdfsproxy-hosts</value>
+  <description>location of hdfsproxy-hosts file
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.dfs.namenode.address</name>
+  <value>localhost:54321</value>
+  <description>namenode address of the HDFS cluster being proxied
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.https.server.keystore.resource</name>
+  <value>ssl-server.xml</value>
+  <description>location of the resource from which ssl server keystore
+  information will be extracted
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.user.permissions.file.location</name>
+  <value>user-permissions.xml</value>
+  <description>location of the user permissions file
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.user.certs.file.location</name>
+  <value>user-certs.xml</value>
+  <description>location of the user certs file
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ugi.cache.ugi.lifetime</name>
+  <value>15</value>
+  <description> The lifetime (in minutes) of a cached ugi
+  </description>
+</property>
+
+</configuration>
+
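
A minimal sketch, not part of this commit, showing how proxy code consumes
this test configuration, mirroring what ProxyFilter.init() does above;
"ReadProxyConf" is a hypothetical class name.

    import java.net.InetSocketAddress;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.net.NetUtils;

    public class ReadProxyConf {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.addResource("hdfsproxy-default.xml");
        InetSocketAddress ssl = NetUtils.createSocketAddr(
            conf.get("hdfsproxy.https.address"));
        InetSocketAddress nn = NetUtils.createSocketAddr(
            conf.get("hdfsproxy.dfs.namenode.address"));
        System.out.println("proxy listens on " + ssl
            + ", forwards to namenode " + nn);
      }
    }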

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-hosts
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-hosts?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-hosts (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-hosts Sat Feb 28 01:58:20 2009
@@ -0,0 +1 @@
+localhost

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/log4j.properties?rev=748770&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/log4j.properties (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/log4j.properties Sat Feb 28 01:58:20 2009
@@ -0,0 +1,76 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+hdfsproxy.root.logger=DEBUG,console
+hdfsproxy.log.dir=.
+hdfsproxy.log.file=hdfsproxy.log
+
+# Define the root logger to the system property "hdfsproxy.root.logger".
+log4j.rootLogger=${hdfsproxy.root.logger}
+
+# Logging Threshold
+log4j.threshold=ALL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hdfsproxy.log.dir}/${hdfsproxy.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to the root logger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hdfsproxy.log.dir}/${hdfsproxy.log.file}
+
+# Logfile size and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+# Custom Logging levels
+
+log4j.logger.org.apache.hadoop.hdfsproxy.HttpsProxy=DEBUG
+log4j.logger.org.apache.hadoop.hdfsproxy.ProxyFilter=DEBUG
+log4j.logger.org.apache.hadoop.hdfsproxy.HdfsProxy=DEBUG


