Mailing-List: contact core-commits-help@hadoop.apache.org; run by ezmlm
Reply-To: core-dev@hadoop.apache.org
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: svn commit: r688923 - in /hadoop/core/trunk/src: core/org/apache/hadoop/http/FilterContainer.java core/org/apache/hadoop/http/FilterInitializer.java test/org/apache/hadoop/http/ test/org/apache/hadoop/http/TestServletFilter.java
Date: Mon, 25 Aug 2008 23:39:49 -0000
To: core-commits@hadoop.apache.org
From: omalley@apache.org
X-Mailer: svnmailer-1.0.8
Message-Id: <20080825233949.BB9EF238899C@eris.apache.org>

Author: omalley
Date: Mon Aug 25 16:39:49 2008
New Revision: 688923

URL: http://svn.apache.org/viewvc?rev=688923&view=rev
Log:
HADOOP-3854. Add some files that I missed.

Added:
    hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java
    hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterInitializer.java
    hadoop/core/trunk/src/test/org/apache/hadoop/http/
    hadoop/core/trunk/src/test/org/apache/hadoop/http/TestServletFilter.java

Added: hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java?rev=688923&view=auto
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java (added)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java Mon Aug 25 16:39:49 2008
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import java.util.Map;
+
+/**
+ * A container class for javax.servlet.Filter.
+ */
+public interface FilterContainer {
+  /**
+   * Add a filter to the container.
+   * @param name Filter name
+   * @param classname Filter class name
+   * @param parameters a map from parameter names to initial values
+   */
+  void addFilter(String name, String classname, Map parameters);
+}

Added: hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterInitializer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterInitializer.java?rev=688923&view=auto
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterInitializer.java (added)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterInitializer.java Mon Aug 25 16:39:49 2008
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+/**
+ * Initialize a javax.servlet.Filter.
+ */
+public abstract class FilterInitializer {
+  /**
+   * Initialize a Filter to a FilterContainer.
+   * @param container The filter container
+   */
+  abstract void initFilter(FilterContainer container);
+}
\ No newline at end of file
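
For context on the new API: a FilterInitializer implementation is expected to do
nothing more than call FilterContainer.addFilter(...). The CountingFilter.Initializer
in the test below does exactly that with a null parameter map. A minimal sketch with
a non-null map might look like the following; the ExampleFilter class name and the
parameter key are hypothetical, and the class sits in org.apache.hadoop.http because
initFilter(...) is package-private:

    package org.apache.hadoop.http;

    import java.util.Map;
    import java.util.TreeMap;

    public class ExampleFilterInitializer extends FilterInitializer {
      void initFilter(FilterContainer container) {
        // Raw Map, matching the addFilter(String, String, Map) signature above.
        Map params = new TreeMap();
        params.put("example.param", "value");  // hypothetical init parameter
        container.addFilter("example",                       // filter name
            "org.apache.hadoop.http.ExampleFilter",          // hypothetical javax.servlet.Filter class
            params);
      }
    }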

Added: hadoop/core/trunk/src/test/org/apache/hadoop/http/TestServletFilter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/http/TestServletFilter.java?rev=688923&view=auto
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/http/TestServletFilter.java (added)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/http/TestServletFilter.java Mon Aug 25 16:39:49 2008
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Map;
+import java.util.Random;
+import java.util.TreeMap;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+public class TestServletFilter extends junit.framework.TestCase {
+  static final Log LOG = LogFactory.getLog(HttpServer.class);
+  static final Map COUNTS = new TreeMap();
+
+  /** A very simple filter which count number of access for each uri */
+  static public class CountingFilter implements Filter {
+    private FilterConfig filterConfig = null;
+
+    public void init(FilterConfig filterConfig) {
+      this.filterConfig = filterConfig;
+    }
+
+    public void destroy() {
+      this.filterConfig = null;
+    }
+
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain) throws IOException, ServletException {
+      if (filterConfig == null)
+        return;
+
+      String uri = ((HttpServletRequest)request).getRequestURI();
+      LOG.info("filtering " + uri);
+      Integer value = COUNTS.get(uri);
+      value = value == null? 1: value + 1;
+      COUNTS.put(uri, value);
+      chain.doFilter(request, response);
+    }
+
+    /** Configuration for CountingFilter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {}
+
+      void initFilter(FilterContainer container) {
+        container.addFilter("counting", CountingFilter.class.getName(), null);
+      }
+    }
+  }
+
+
+  /** access a url, ignoring some IOException such as the page does not exist */
+  static void access(String urlstring) throws IOException {
+    LOG.warn("access " + urlstring);
+    URL url = new URL(urlstring);
+    URLConnection connection = url.openConnection();
+    connection.connect();
+
+    try {
+      BufferedReader in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream()));
+      try {
+        for(; in.readLine() != null; );
+      } finally {
+        in.close();
+      }
+    } catch(IOException ioe) {
+      LOG.warn("urlstring=" + urlstring, ioe);
+    }
+  }
+
+  public void testServletFilter() throws Exception {
+    Configuration conf = new Configuration();
+
+    //start a http server with CountingFilter
+    conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
+        CountingFilter.Initializer.class.getName());
+    HttpServer http = new HttpServer("datanode", "localhost", 0, true, conf);
+    http.start();
+
+    final String fsckURL = "/fsck";
+    final String stacksURL = "/stacks";
+    final String ajspURL = "/a.jsp";
+    final String[] urls = {fsckURL, stacksURL, ajspURL};
+    final Random ran = new Random();
+    final int[] sequence = new int[20];
+    final int[] counts = new int[urls.length];
+
+    //generate a random sequence and update counts
+    for(int i = 0; i < sequence.length; i++) {
+      sequence[i] = ran.nextInt(urls.length);
+      counts[sequence[i]]++;
+    }
+
+    //access the urls as the sequence
+    final String prefix = "http://localhost:" + http.getPort();
+    try {
+      for(int i = 0; i < sequence.length; i++) {
+        access(prefix + urls[sequence[i]]);
+      }
+    } finally {
+      http.stop();
+    }
+
+    LOG.info("COUNTS = " + COUNTS);
+    //make sure fsck not get filtered
+    assertFalse(COUNTS.containsKey(fsckURL));
+
+    //verify other counts
+    for(int i = 1; i < urls.length; i++) {
+      if (counts[i] == 0) {
+        assertFalse(COUNTS.containsKey(urls[i]));
+      } else {
+        assertEquals(counts[i], COUNTS.remove(urls[i]).intValue());
+      }
+    }
+    assertTrue(COUNTS.isEmpty());
+  }
+}
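
As the test wires it up, a servlet filter reaches the HTTP server by pointing
HttpServer.FILTER_INITIALIZER_PROPERTY at a FilterInitializer class in the
Configuration before constructing the server. Condensed from testServletFilter()
above (the server name, bind address, and port are simply the values the test uses):

    Configuration conf = new Configuration();
    conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
        CountingFilter.Initializer.class.getName());
    HttpServer http = new HttpServer("datanode", "localhost", 0, true, conf);
    http.start();
    // Requests served while the server is up pass through CountingFilter,
    // which records a per-URI hit count in COUNTS.
    http.stop();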