hadoop-hdfs-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1212060 [7/8] - in /hadoop/common/trunk/hadoop-hdfs-project: ./ hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/src/ hadoop-hdfs-httpfs/src/main/ hadoop-hdfs-httpfs/src/main/conf/ hadoop-hdfs-httpfs/src/main/java/ hadoop-hdfs-httpfs/src/main/java/o...
Date: Thu, 08 Dec 2011 19:25:33 GMT
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.servlet;
+
+import junit.framework.Assert;
+import org.apache.hadoop.lib.server.Server;
+import org.apache.hadoop.test.HTestCase;
+import org.apache.hadoop.test.TestDir;
+import org.apache.hadoop.test.TestDirHelper;
+import org.junit.Test;
+
+public class TestServerWebApp extends HTestCase {
+
+  @Test(expected = IllegalArgumentException.class)
+  public void getHomeDirNotDef() {
+    ServerWebApp.getHomeDir("TestServerWebApp00");
+  }
+
+  @Test
+  public void getHomeDir() {
+    System.setProperty("TestServerWebApp0.home.dir", "/tmp");
+    Assert.assertEquals(ServerWebApp.getHomeDir("TestServerWebApp0"), "/tmp");
+    Assert.assertEquals(ServerWebApp.getDir("TestServerWebApp0", ".log.dir", "/tmp/log"), "/tmp/log");
+    System.setProperty("TestServerWebApp0.log.dir", "/tmplog");
+    Assert.assertEquals(ServerWebApp.getDir("TestServerWebApp0", ".log.dir", "/tmp/log"), "/tmplog");
+  }
+
+  @Test
+  @TestDir
+  public void lifecycle() throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+    System.setProperty("TestServerWebApp1.home.dir", dir);
+    System.setProperty("TestServerWebApp1.config.dir", dir);
+    System.setProperty("TestServerWebApp1.log.dir", dir);
+    System.setProperty("TestServerWebApp1.temp.dir", dir);
+    ServerWebApp server = new ServerWebApp("TestServerWebApp1") {
+    };
+
+    Assert.assertEquals(server.getStatus(), Server.Status.UNDEF);
+    server.contextInitialized(null);
+    Assert.assertEquals(server.getStatus(), Server.Status.NORMAL);
+    server.contextDestroyed(null);
+    Assert.assertEquals(server.getStatus(), Server.Status.SHUTDOWN);
+  }
+
+  @Test(expected = RuntimeException.class)
+  @TestDir
+  public void failedInit() throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+    System.setProperty("TestServerWebApp2.home.dir", dir);
+    System.setProperty("TestServerWebApp2.config.dir", dir);
+    System.setProperty("TestServerWebApp2.log.dir", dir);
+    System.setProperty("TestServerWebApp2.temp.dir", dir);
+    System.setProperty("testserverwebapp2.services", "FOO");
+    ServerWebApp server = new ServerWebApp("TestServerWebApp2") {
+    };
+
+    server.contextInitialized(null);
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestCheck.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestCheck.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestCheck.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestCheck.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.util;
+
+
+import junit.framework.Assert;
+import org.apache.hadoop.test.HTestCase;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+
+public class TestCheck extends HTestCase {
+
+  @Test
+  public void notNullNotNull() {
+    Assert.assertEquals(Check.notNull("value", "name"), "value");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void notNullNull() {
+    Check.notNull(null, "name");
+  }
+
+  @Test
+  public void notNullElementsNotNull() {
+    Check.notNullElements(new ArrayList<String>(), "name");
+    Check.notNullElements(Arrays.asList("a"), "name");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void notNullElementsNullList() {
+    Check.notNullElements(null, "name");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void notNullElementsNullElements() {
+    Check.notNullElements(Arrays.asList("a", "", null), "name");
+  }
+
+  @Test
+  public void notEmptyElementsNotNull() {
+    Check.notEmptyElements(new ArrayList<String>(), "name");
+    Check.notEmptyElements(Arrays.asList("a"), "name");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void notEmptyElementsNullList() {
+    Check.notEmptyElements(null, "name");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void notEmptyElementsNullElements() {
+    Check.notEmptyElements(Arrays.asList("a", null), "name");
+  }
+
+
+  @Test(expected = IllegalArgumentException.class)
+  public void notEmptyElementsEmptyElements() {
+    Check.notEmptyElements(Arrays.asList("a", ""), "name");
+  }
+
+
+  @Test
+  public void notEmptyNotEmpty() {
+    Assert.assertEquals(Check.notEmpty("value", "name"), "value");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void notEmptyNull() {
+    Check.notEmpty(null, "name");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void notEmptyEmpty() {
+    Check.notEmpty("", "name");
+  }
+
+  @Test
+  public void validIdentifierValid() throws Exception {
+    Assert.assertEquals(Check.validIdentifier("a", 1, ""), "a");
+    Assert.assertEquals(Check.validIdentifier("a1", 2, ""), "a1");
+    Assert.assertEquals(Check.validIdentifier("a_", 3, ""), "a_");
+    Assert.assertEquals(Check.validIdentifier("_", 1, ""), "_");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void validIdentifierInvalid1() throws Exception {
+    Check.validIdentifier("!", 1, "");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void validIdentifierInvalid2() throws Exception {
+    Check.validIdentifier("a1", 1, "");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void validIdentifierInvalid3() throws Exception {
+    Check.validIdentifier("1", 1, "");
+  }
+
+  @Test
+  public void checkGTZeroGreater() {
+    Assert.assertEquals(Check.gt0(120, "test"), 120);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void checkGTZeroZero() {
+    Check.gt0(0, "test");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void checkGTZeroLessThanZero() {
+    Check.gt0(-1, "test");
+  }
+
+  @Test
+  public void checkGEZero() {
+    Assert.assertEquals(Check.ge0(120, "test"), 120);
+    Assert.assertEquals(Check.ge0(0, "test"), 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void checkGELessThanZero() {
+    Check.ge0(-1, "test");
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestConfigurationUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestConfigurationUtils.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestConfigurationUtils.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestConfigurationUtils.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.util;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+public class TestConfigurationUtils {
+
+  @Test
+  public void constructors() throws Exception {
+    Configuration conf = new Configuration(false);
+    Assert.assertEquals(conf.size(), 0);
+
+    byte[] bytes = "<configuration><property><name>a</name><value>A</value></property></configuration>".getBytes();
+    InputStream is = new ByteArrayInputStream(bytes);
+    conf = new Configuration(false);
+    ConfigurationUtils.load(conf, is);
+    Assert.assertEquals(conf.size(), 1);
+    Assert.assertEquals(conf.get("a"), "A");
+  }
+
+
+  @Test(expected = IOException.class)
+  public void constructorsFail3() throws Exception {
+    InputStream is = new ByteArrayInputStream("<xonfiguration></xonfiguration>".getBytes());
+    Configuration conf = new Configuration(false);
+    ConfigurationUtils.load(conf, is);
+  }
+
+  @Test
+  public void copy() throws Exception {
+    Configuration srcConf = new Configuration(false);
+    Configuration targetConf = new Configuration(false);
+
+    srcConf.set("testParameter1", "valueFromSource");
+    srcConf.set("testParameter2", "valueFromSource");
+
+    targetConf.set("testParameter2", "valueFromTarget");
+    targetConf.set("testParameter3", "valueFromTarget");
+
+    ConfigurationUtils.copy(srcConf, targetConf);
+
+    Assert.assertEquals("valueFromSource", targetConf.get("testParameter1"));
+    Assert.assertEquals("valueFromSource", targetConf.get("testParameter2"));
+    Assert.assertEquals("valueFromTarget", targetConf.get("testParameter3"));
+  }
+
+  @Test
+  public void injectDefaults() throws Exception {
+    Configuration srcConf = new Configuration(false);
+    Configuration targetConf = new Configuration(false);
+
+    srcConf.set("testParameter1", "valueFromSource");
+    srcConf.set("testParameter2", "valueFromSource");
+
+    targetConf.set("testParameter2", "originalValueFromTarget");
+    targetConf.set("testParameter3", "originalValueFromTarget");
+
+    ConfigurationUtils.injectDefaults(srcConf, targetConf);
+
+    Assert.assertEquals("valueFromSource", targetConf.get("testParameter1"));
+    Assert.assertEquals("originalValueFromTarget", targetConf.get("testParameter2"));
+    Assert.assertEquals("originalValueFromTarget", targetConf.get("testParameter3"));
+
+    Assert.assertEquals("valueFromSource", srcConf.get("testParameter1"));
+    Assert.assertEquals("valueFromSource", srcConf.get("testParameter2"));
+    Assert.assertNull(srcConf.get("testParameter3"));
+  }
+
+
+  @Test
+  public void resolve() {
+    Configuration conf = new Configuration(false);
+    conf.set("a", "A");
+    conf.set("b", "${a}");
+    Assert.assertEquals(conf.getRaw("a"), "A");
+    Assert.assertEquals(conf.getRaw("b"), "${a}");
+    conf = ConfigurationUtils.resolve(conf);
+    Assert.assertEquals(conf.getRaw("a"), "A");
+    Assert.assertEquals(conf.getRaw("b"), "A");
+  }
+
+  @Test
+  public void testVarResolutionAndSysProps() {
+    String userName = System.getProperty("user.name");
+    Configuration conf = new Configuration(false);
+    conf.set("a", "A");
+    conf.set("b", "${a}");
+    conf.set("c", "${user.name}");
+    conf.set("d", "${aaa}");
+    Assert.assertEquals(conf.getRaw("a"), "A");
+    Assert.assertEquals(conf.getRaw("b"), "${a}");
+    Assert.assertEquals(conf.getRaw("c"), "${user.name}");
+    Assert.assertEquals(conf.get("a"), "A");
+    Assert.assertEquals(conf.get("b"), "A");
+    Assert.assertEquals(conf.get("c"), userName);
+    Assert.assertEquals(conf.get("d"), "${aaa}");
+
+    conf.set("user.name", "foo");
+    Assert.assertEquals(conf.get("user.name"), "foo");
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestBooleanParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestBooleanParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestBooleanParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestBooleanParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+public class TestBooleanParam {
+
+  @Test
+  public void param() throws Exception {
+    BooleanParam param = new BooleanParam("p", "true") {
+    };
+    Assert.assertEquals(param.getDomain(), "a boolean");
+    Assert.assertEquals(param.value(), Boolean.TRUE);
+    Assert.assertEquals(param.toString(), "true");
+    param = new BooleanParam("p", "false") {
+    };
+    Assert.assertEquals(param.value(), Boolean.FALSE);
+    param = new BooleanParam("p", null) {
+    };
+    Assert.assertEquals(param.value(), null);
+    param = new BooleanParam("p", "") {
+    };
+    Assert.assertEquals(param.value(), null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid() throws Exception {
+    new BooleanParam("p", "x") {
+    };
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestByteParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestByteParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestByteParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestByteParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+public class TestByteParam {
+
+  @Test
+  public void param() throws Exception {
+    ByteParam param = new ByteParam("p", "1") {
+    };
+    Assert.assertEquals(param.getDomain(), "a byte");
+    Assert.assertEquals(param.value(), new Byte((byte) 1));
+    Assert.assertEquals(param.toString(), "1");
+    param = new ByteParam("p", null) {
+    };
+    Assert.assertEquals(param.value(), null);
+    param = new ByteParam("p", "") {
+    };
+    Assert.assertEquals(param.value(), null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid1() throws Exception {
+    new ByteParam("p", "x") {
+    };
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid2() throws Exception {
+    new ByteParam("p", "256") {
+    };
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestEnumParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestEnumParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestEnumParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestEnumParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+public class TestEnumParam {
+
+  public static enum ENUM {
+    FOO, BAR
+  }
+
+  @Test
+  public void param() throws Exception {
+    EnumParam<ENUM> param = new EnumParam<ENUM>("p", "FOO", ENUM.class) {
+    };
+    Assert.assertEquals(param.getDomain(), "FOO,BAR");
+    Assert.assertEquals(param.value(), ENUM.FOO);
+    Assert.assertEquals(param.toString(), "FOO");
+    param = new EnumParam<ENUM>("p", null, ENUM.class) {
+    };
+    Assert.assertEquals(param.value(), null);
+    param = new EnumParam<ENUM>("p", "", ENUM.class) {
+    };
+    Assert.assertEquals(param.value(), null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid1() throws Exception {
+    new EnumParam<ENUM>("p", "x", ENUM.class) {
+    };
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestInputStreamEntity.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestInputStreamEntity.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestInputStreamEntity.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestInputStreamEntity.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+
+public class TestInputStreamEntity {
+
+  @Test
+  public void test() throws Exception {
+    InputStream is = new ByteArrayInputStream("abc".getBytes());
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    InputStreamEntity i = new InputStreamEntity(is);
+    i.write(baos);
+    baos.close();
+    Assert.assertEquals(new String(baos.toByteArray()), "abc");
+
+    is = new ByteArrayInputStream("abc".getBytes());
+    baos = new ByteArrayOutputStream();
+    i = new InputStreamEntity(is, 1, 1);
+    i.write(baos);
+    baos.close();
+    Assert.assertEquals(baos.toByteArray()[0], 'b');
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestIntegerParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestIntegerParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestIntegerParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestIntegerParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+public class TestIntegerParam {
+
+  @Test
+  public void param() throws Exception {
+    IntegerParam param = new IntegerParam("p", "1") {
+    };
+    Assert.assertEquals(param.getDomain(), "an integer");
+    Assert.assertEquals(param.value(), new Integer(1));
+    Assert.assertEquals(param.toString(), "1");
+    param = new IntegerParam("p", null) {
+    };
+    Assert.assertEquals(param.value(), null);
+    param = new IntegerParam("p", "") {
+    };
+    Assert.assertEquals(param.value(), null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid1() throws Exception {
+    new IntegerParam("p", "x") {
+    };
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid2() throws Exception {
+    new IntegerParam("p", "" + Long.MAX_VALUE) {
+    };
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONMapProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONMapProvider.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONMapProvider.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONMapProvider.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import junit.framework.Assert;
+import org.json.simple.JSONObject;
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.util.Map;
+
+public class TestJSONMapProvider {
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void test() throws Exception {
+    JSONMapProvider p = new JSONMapProvider();
+    Assert.assertTrue(p.isWriteable(Map.class, null, null, null));
+    Assert.assertFalse(p.isWriteable(this.getClass(), null, null, null));
+    Assert.assertEquals(p.getSize(null, null, null, null, null), -1);
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    JSONObject json = new JSONObject();
+    json.put("a", "A");
+    p.writeTo(json, JSONObject.class, null, null, null, null, baos);
+    baos.close();
+    Assert.assertEquals(new String(baos.toByteArray()).trim(), "{\"a\":\"A\"}");
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONProvider.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONProvider.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONProvider.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import junit.framework.Assert;
+import org.json.simple.JSONObject;
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+
+public class TestJSONProvider {
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void test() throws Exception {
+    JSONProvider p = new JSONProvider();
+    Assert.assertTrue(p.isWriteable(JSONObject.class, null, null, null));
+    Assert.assertFalse(p.isWriteable(this.getClass(), null, null, null));
+    Assert.assertEquals(p.getSize(null, null, null, null, null), -1);
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    JSONObject json = new JSONObject();
+    json.put("a", "A");
+    p.writeTo(json, JSONObject.class, null, null, null, null, baos);
+    baos.close();
+    Assert.assertEquals(new String(baos.toByteArray()).trim(), "{\"a\":\"A\"}");
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestLongParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestLongParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestLongParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestLongParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+public class TestLongParam {
+
+  @Test
+  public void param() throws Exception {
+    LongParam param = new LongParam("p", "1") {
+    };
+    Assert.assertEquals(param.getDomain(), "a long");
+    Assert.assertEquals(param.value(), new Long(1));
+    Assert.assertEquals(param.toString(), "1");
+    param = new LongParam("p", null) {
+    };
+    Assert.assertEquals(param.value(), null);
+    param = new LongParam("p", "") {
+    };
+    Assert.assertEquals(param.value(), null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid1() throws Exception {
+    new LongParam("p", "x") {
+    };
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestShortParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestShortParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestShortParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestShortParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+public class TestShortParam {
+
+  @Test
+  public void param() throws Exception {
+    ShortParam param = new ShortParam("p", "1") {
+    };
+    Assert.assertEquals(param.getDomain(), "a short");
+    Assert.assertEquals(param.value(), new Short((short) 1));
+    Assert.assertEquals(param.toString(), "1");
+    param = new ShortParam("p", null) {
+    };
+    Assert.assertEquals(param.value(), null);
+    param = new ShortParam("p", "") {
+    };
+    Assert.assertEquals(param.value(), null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid1() throws Exception {
+    new ShortParam("p", "x") {
+    };
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void invalid2() throws Exception {
+    new ShortParam("p", "" + Integer.MAX_VALUE) {
+    };
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestStringParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestStringParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestStringParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestStringParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+import java.util.regex.Pattern;
+
+public class TestStringParam {
+
+  @Test
+  public void param() throws Exception {
+    StringParam param = new StringParam("p", "s") {
+    };
+    Assert.assertEquals(param.getDomain(), "a string");
+    Assert.assertEquals(param.value(), "s");
+    Assert.assertEquals(param.toString(), "s");
+    param = new StringParam("p", null) {
+    };
+    Assert.assertEquals(param.value(), null);
+    param = new StringParam("p", "") {
+    };
+    Assert.assertEquals(param.value(), null);
+
+    param.setValue("S");
+    Assert.assertEquals(param.value(), "S");
+  }
+
+  @Test
+  public void paramRegEx() throws Exception {
+    StringParam param = new StringParam("p", "Aaa", Pattern.compile("A.*")) {
+    };
+    Assert.assertEquals(param.getDomain(), "A.*");
+    Assert.assertEquals(param.value(), "Aaa");
+    Assert.assertEquals(param.toString(), "Aaa");
+    param = new StringParam("p", null) {
+    };
+    Assert.assertEquals(param.value(), null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void paramInvalidRegEx() throws Exception {
+    new StringParam("p", "Baa", Pattern.compile("A.*")) {
+    };
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestUserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestUserProvider.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestUserProvider.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestUserProvider.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import com.sun.jersey.api.core.HttpContext;
+import com.sun.jersey.api.core.HttpRequestContext;
+import com.sun.jersey.core.spi.component.ComponentScope;
+import junit.framework.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.slf4j.MDC;
+
+import javax.ws.rs.core.MultivaluedMap;
+import java.security.Principal;
+
+public class TestUserProvider {
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void noUser() {
+    MDC.remove("user");
+    HttpRequestContext request = Mockito.mock(HttpRequestContext.class);
+    Mockito.when(request.getUserPrincipal()).thenReturn(null);
+    MultivaluedMap map = Mockito.mock(MultivaluedMap.class);
+    Mockito.when(map.getFirst(UserProvider.USER_NAME_PARAM)).thenReturn(null);
+    Mockito.when(request.getQueryParameters()).thenReturn(map);
+    HttpContext context = Mockito.mock(HttpContext.class);
+    Mockito.when(context.getRequest()).thenReturn(request);
+    UserProvider up = new UserProvider();
+    Assert.assertNull(up.getValue(context));
+    Assert.assertNull(MDC.get("user"));
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void queryStringUser() {
+    MDC.remove("user");
+    HttpRequestContext request = Mockito.mock(HttpRequestContext.class);
+    Mockito.when(request.getUserPrincipal()).thenReturn(null);
+    MultivaluedMap map = Mockito.mock(MultivaluedMap.class);
+    Mockito.when(map.getFirst(UserProvider.USER_NAME_PARAM)).thenReturn("foo");
+    Mockito.when(request.getQueryParameters()).thenReturn(map);
+    HttpContext context = Mockito.mock(HttpContext.class);
+    Mockito.when(context.getRequest()).thenReturn(request);
+    UserProvider up = new UserProvider();
+    Assert.assertEquals(up.getValue(context).getName(), "foo");
+    Assert.assertEquals(MDC.get("user"), "foo");
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void principalUser() {
+    MDC.remove("user");
+    HttpRequestContext request = Mockito.mock(HttpRequestContext.class);
+    Mockito.when(request.getUserPrincipal()).thenReturn(new Principal() {
+      @Override
+      public String getName() {
+        return "bar";
+      }
+    });
+    HttpContext context = Mockito.mock(HttpContext.class);
+    Mockito.when(context.getRequest()).thenReturn(request);
+    UserProvider up = new UserProvider();
+    Assert.assertEquals(up.getValue(context).getName(), "bar");
+    Assert.assertEquals(MDC.get("user"), "bar");
+  }
+
+  @Test
+  public void getters() {
+    UserProvider up = new UserProvider();
+    Assert.assertEquals(up.getScope(), ComponentScope.PerRequest);
+    Assert.assertEquals(up.getInjectable(null, null, Principal.class), up);
+    Assert.assertNull(up.getInjectable(null, null, String.class));
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HFSTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HFSTestCase.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HFSTestCase.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HFSTestCase.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.junit.Rule;
+import org.junit.rules.MethodRule;
+
+public abstract class HFSTestCase extends HTestCase {
+
+  @Rule
+  public MethodRule hdfsTestHelper = new TestHdfsHelper();
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HTestCase.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HTestCase.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HTestCase.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,174 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import junit.framework.Assert;
+import org.junit.Rule;
+import org.junit.rules.MethodRule;
+
+import java.text.MessageFormat;
+
+public abstract class HTestCase {
+
+  public static final String TEST_WAITFOR_RATIO_PROP = "test.waitfor.ratio";
+
+  static {
+    SysPropsForTestsLoader.init();
+  }
+
+  private static float WAITFOR_RATIO_DEFAULT = Float.parseFloat(System.getProperty(TEST_WAITFOR_RATIO_PROP, "1"));
+
+  private float waitForRatio = WAITFOR_RATIO_DEFAULT;
+
+  @Rule
+  public MethodRule testDir = new TestDirHelper();
+
+  @Rule
+  public MethodRule jettyTestHelper = new TestJettyHelper();
+
+  @Rule
+  public MethodRule exceptionHelper = new TestExceptionHelper();
+
+  /**
+   * Sets the 'wait for ratio' used in the {@link #sleep(long)},
+   * {@link #waitFor(int, Predicate)} and
+   * {@link #waitFor(int, boolean, Predicate)} methods for the current
+   * test class.
+   * <p/>
+   * This is useful when running time-sensitive tests on a slow
+   * machine.
+   *
+   * @param ratio the 'wait for ratio' to set.
+   */
+  protected void setWaitForRatio(float ratio) {
+    waitForRatio = ratio;
+  }
+
+  /**
+   * Returns the 'wait for ratio' used in the {@link #sleep(long)},
+   * {@link #waitFor(int, Predicate)} and
+   * {@link #waitFor(int, boolean, Predicate)} methods for the current
+   * test class.
+   * <p/>
+   * This is useful when running time-sensitive tests on a slow
+   * machine.
+   * <p/>
+   * The default value is obtained from the Java System property
+   * <code>test.waitfor.ratio</code> which defaults to <code>1</code>.
+   *
+   * @return the 'wait for ratio' for the current test class.
+   */
+  protected float getWaitForRatio() {
+    return waitForRatio;
+  }
+
+  /**
+   * A predicate 'closure' used by the {@link #waitFor(int, Predicate)} and
+   * {@link #waitFor(int, boolean, Predicate)} methods.
+   */
+  public static interface Predicate {
+
+    /**
+     * Perform a predicate evaluation.
+     *
+     * @return the boolean result of the evaluation.
+     *
+     * @throws Exception thrown if the predicate could not be evaluated.
+     */
+    public boolean evaluate() throws Exception;
+
+  }
+
+  /**
+   * Makes the current thread sleep for the specified number of milliseconds.
+   * <p/>
+   * The sleep time is multiplied by the {@link #getWaitForRatio()}.
+   *
+   * @param time the number of milliseconds to sleep.
+   */
+  protected void sleep(long time) {
+    try {
+      Thread.sleep((long) (getWaitForRatio() * time));
+    } catch (InterruptedException ex) {
+      System.err.println(MessageFormat.format("Sleep interrupted, {0}", ex.toString()));
+    }
+  }
+
+  /**
+   * Waits up to the specified timeout for the given {@link Predicate} to
+   * become <code>true</code>, returning <code>-1</code> if the timeout is
+   * reached and the Predicate is still <code>false</code>.
+   * <p/>
+   * The timeout time is multiplied by the {@link #getWaitForRatio()}.
+   *
+   * @param timeout the timeout in milliseconds to wait for the predicate.
+   * @param predicate the predicate to evaluate.
+   *
+   * @return the effective wait, in milliseconds, until the predicate became
+   *         <code>true</code>, or <code>-1</code> if it did not.
+   */
+  protected long waitFor(int timeout, Predicate predicate) {
+    return waitFor(timeout, false, predicate);
+  }
+
+  /**
+   * Waits up to the specified timeout for the given {@link Predicate} to
+   * become <code>true</code>.
+   * <p/>
+   * The timeout time is multiplied by the {@link #getWaitForRatio()}.
+   *
+   * @param timeout the timeout in milliseconds to wait for the predicate.
+   * @param failIfTimeout indicates if the test should be failed if the
+   * predicate times out.
+   * @param predicate the predicate to evaluate.
+   *
+   * @return the effective wait, in milliseconds, until the predicate became
+   *         <code>true</code>, or <code>-1</code> if the predicate did not
+   *         evaluate to <code>true</code>.
+   */
+  protected long waitFor(int timeout, boolean failIfTimeout, Predicate predicate) {
+    long started = System.currentTimeMillis();
+    long mustEnd = System.currentTimeMillis() + (long) (getWaitForRatio() * timeout);
+    long lastEcho = 0;
+    try {
+      long waiting = mustEnd - System.currentTimeMillis();
+      System.out.println(MessageFormat.format("Waiting up to [{0}] msec", waiting));
+      boolean eval;
+      while (!(eval = predicate.evaluate()) && System.currentTimeMillis() < mustEnd) {
+        if ((System.currentTimeMillis() - lastEcho) > 5000) {
+          waiting = mustEnd - System.currentTimeMillis();
+          System.out.println(MessageFormat.format("Waiting up to [{0}] msec", waiting));
+          lastEcho = System.currentTimeMillis();
+        }
+        Thread.sleep(100);
+      }
+      if (!eval) {
+        if (failIfTimeout) {
+          Assert.fail(MessageFormat.format("Waiting timed out after [{0}] msec", timeout));
+        } else {
+          System.out.println(MessageFormat.format("Waiting timed out after [{0}] msec", timeout));
+        }
+      }
+      return (eval) ? System.currentTimeMillis() - started : -1;
+    } catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+}
+
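
For illustration, a test extending the HTestCase class above could use the
waitFor(int, boolean, Predicate) helper to poll for an asynchronous condition,
roughly as in the following sketch; the "server" object and its isRunning()
method are hypothetical placeholders, only the waitFor/Predicate API comes
from HTestCase.

  @Test
  public void serverEventuallyStarts() throws Exception {
    // Wait up to 10 seconds (scaled by the 'wait for ratio') for the
    // hypothetical server to report that it is running, failing the test
    // if the timeout is reached.
    long elapsed = waitFor(10000, true, new Predicate() {
      @Override
      public boolean evaluate() throws Exception {
        return server.isRunning();  // hypothetical condition being polled
      }
    });
    // waitFor returns the effective wait, in milliseconds, once the
    // predicate evaluated to true.
    Assert.assertTrue(elapsed >= 0);
  }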

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HadoopUsersConfTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HadoopUsersConfTestHelper.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HadoopUsersConfTestHelper.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HadoopUsersConfTestHelper.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Helper to configure FileSystemAccess user/group and proxyuser
+ * configuration for testing using Java System properties.
+ * <p/>
+ * It uses the {@link SysPropsForTestsLoader} to load Java System
+ * properties for testing.
+ */
+public class HadoopUsersConfTestHelper {
+
+  static {
+    SysPropsForTestsLoader.init();
+  }
+
+  public static final String HADOOP_PROXYUSER = "test.hadoop.proxyuser";
+
+  public static final String HADOOP_PROXYUSER_HOSTS = "test.hadoop.proxyuser.hosts";
+
+  public static final String HADOOP_PROXYUSER_GROUPS = "test.hadoop.proxyuser.groups";
+
+  public static final String HADOOP_USER_PREFIX = "test.hadoop.user.";
+
+  /**
+   * Returns a valid FileSystemAccess proxyuser for the FileSystemAccess cluster.
+   * <p/>
+   * The user is read from the Java System property
+   * <code>test.hadoop.proxyuser</code> which defaults to the current user
+   * (Java System property <code>user.name</code>).
+   * <p/>
+   * This property should be set in the <code>test.properties</code> file.
+   * <p/>
+   * When running a FileSystemAccess minicluster, this property is used to configure it.
+   * <p/>
+   * When using an external FileSystemAccess cluster, it is expected this property is set to
+   * a valid proxy user.
+   *
+   * @return a valid FileSystemAccess proxyuser for the FileSystemAccess cluster.
+   */
+  public static String getHadoopProxyUser() {
+    return System.getProperty(HADOOP_PROXYUSER, System.getProperty("user.name"));
+  }
+
+  /**
+   * Returns the hosts for the FileSystemAccess proxyuser settings.
+   * <p/>
+   * The hosts are read from the Java System property
+   * <code>test.hadoop.proxyuser.hosts</code> which defaults to <code>*</code>.
+   * <p/>
+   * This property should be set in the <code>test.properties</code> file.
+   * <p/>
+   * This property is ONLY used when running a FileSystemAccess minicluster; it is
+   * used to configure that minicluster.
+   * <p/>
+   * When using an external FileSystemAccess cluster this property is ignored.
+   *
+   * @return the hosts for the FileSystemAccess proxyuser settings.
+   */
+  public static String getHadoopProxyUserHosts() {
+    return System.getProperty(HADOOP_PROXYUSER_HOSTS, "*");
+  }
+
+  /**
+   * Returns the groups for the FileSystemAccess proxyuser settings.
+   * <p/>
+   * The groups are read from the Java System property
+   * <code>test.hadoop.proxyuser.groups</code> which defaults to <code>*</code>.
+   * <p/>
+   * This property should be set in the <code>test.properties</code> file.
+   * <p/>
+   * This property is ONLY used when running a FileSystemAccess minicluster; it is
+   * used to configure that minicluster.
+   * <p/>
+   * When using an external FileSystemAccess cluster this property is ignored.
+   *
+   * @return the groups for the FileSystemAccess proxyuser settings.
+   */
+  public static String getHadoopProxyUserGroups() {
+    return System.getProperty(HADOOP_PROXYUSER_GROUPS, "*");
+  }
+
+  private static final String[] DEFAULT_USERS = new String[]{"user1", "user2"};
+  private static final String[] DEFAULT_USERS_GROUP = new String[]{"group1", "supergroup"};
+
+  /**
+   * Returns the FileSystemAccess users to be used for tests. These users are defined
+   * in the <code>test.properties</code> file in properties of the form
+   * <code>test.hadoop.user.#USER#=#GROUP1#,#GROUP2#,...</code>.
+   * <p/>
+   * These properties are used to configure the FileSystemAccess minicluster user/group
+   * information.
+   * <p/>
+   * When using an external FileSystemAccess cluster these properties should match the
+   * user/groups settings in the cluster.
+   *
+   * @return the FileSystemAccess users used for testing.
+   */
+  public static String[] getHadoopUsers() {
+    List<String> users = new ArrayList<String>();
+    for (String name : System.getProperties().stringPropertyNames()) {
+      if (name.startsWith(HADOOP_USER_PREFIX)) {
+        users.add(name.substring(HADOOP_USER_PREFIX.length()));
+      }
+    }
+    return (users.size() != 0) ? users.toArray(new String[users.size()]) : DEFAULT_USERS;
+  }
+
+  /**
+   * Returns the groups a FileSystemAccess user belongs to during tests. These users/groups
+   * are defined in the <code>test.properties</code> file in properties of the
+   * form <code>test.hadoop.user.#USER#=#GROUP1#,#GROUP2#,...</code>.
+   * <p/>
+   * These properties are used to configure the FileSystemAccess minicluster user/group
+   * information.
+   * <p/>
+   * When using an external FileSystemAccess cluster these properties should match the
+   * user/groups settings in the cluster.
+   *
+   * @param user user name to get groups for.
+   *
+   * @return the groups of FileSystemAccess users used for testing.
+   */
+  public static String[] getHadoopUserGroups(String user) {
+    if (getHadoopUsers() == DEFAULT_USERS) {
+      return DEFAULT_USERS_GROUP;
+    } else {
+      String groups = System.getProperty(HADOOP_USER_PREFIX + user);
+      return (groups != null) ? groups.split(",") : new String[0];
+    }
+  }
+
+  public static Configuration getBaseConf() {
+    Configuration conf = new Configuration();
+    for (String name : System.getProperties().stringPropertyNames()) {
+      conf.set(name, System.getProperty(name));
+    }
+    return conf;
+  }
+
+  public static void addUserConf(Configuration conf) {
+    conf.set("hadoop.security.authentication", "simple");
+    conf.set("hadoop.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
+             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
+    conf.set("hadoop.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
+             HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
+
+    for (String user : HadoopUsersConfTestHelper.getHadoopUsers()) {
+      String[] groups = HadoopUsersConfTestHelper.getHadoopUserGroups(user);
+      UserGroupInformation.createUserForTesting(user, groups);
+    }
+  }
+
+
+}
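
A minimal usage sketch of the helper above (illustration only, not part of the
committed sources; the sketch class name is hypothetical and it is assumed to
live in the org.apache.hadoop.test package alongside the other test helpers).
It assumes the test.properties conventions described in the Javadoc:

package org.apache.hadoop.test;

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;

// Illustration only: building a test Configuration with the proxyuser and
// user/group settings applied by HadoopUsersConfTestHelper.
public class HadoopUsersConfUsageSketch {

  public static void main(String[] args) {
    // Base configuration seeded from all Java System properties.
    Configuration conf = HadoopUsersConfTestHelper.getBaseConf();
    // Adds simple authentication plus the proxyuser and user/group settings.
    HadoopUsersConfTestHelper.addUserConf(conf);

    // The configured (or default) test users and the groups of the first one.
    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
    String[] groups = HadoopUsersConfTestHelper.getHadoopUserGroups(user);
    System.out.println(user + " -> " + Arrays.toString(groups));
  }
}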

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/SysPropsForTestsLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/SysPropsForTestsLoader.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/SysPropsForTestsLoader.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/SysPropsForTestsLoader.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.Map;
+import java.util.Properties;
+
+public class SysPropsForTestsLoader {
+
+  public static final String TEST_PROPERTIES_PROP = "test.properties";
+
+  static {
+    try {
+      String testFileName = System.getProperty(TEST_PROPERTIES_PROP, "test.properties");
+      File currentDir = new File(testFileName).getAbsoluteFile().getParentFile();
+      File testFile = new File(currentDir, testFileName);
+      while (currentDir != null && !testFile.exists()) {
+        testFile = new File(testFile.getAbsoluteFile().getParentFile().getParentFile(), testFileName);
+        currentDir = currentDir.getParentFile();
+        if (currentDir != null) {
+          testFile = new File(currentDir, testFileName);
+        }
+      }
+
+      if (testFile.exists()) {
+        System.out.println();
+        System.out.println(">>> " + TEST_PROPERTIES_PROP + " : " + testFile.getAbsolutePath());
+        Properties testProperties = new Properties();
+        testProperties.load(new FileReader(testFile));
+        for (Map.Entry entry : testProperties.entrySet()) {
+          if (!System.getProperties().containsKey(entry.getKey())) {
+            System.setProperty((String) entry.getKey(), (String) entry.getValue());
+          }
+        }
+      } else if (System.getProperty(TEST_PROPERTIES_PROP) != null) {
+        System.err.println(MessageFormat.format("Specified 'test.properties' file does not exist [{0}]",
+                                                System.getProperty(TEST_PROPERTIES_PROP)));
+        System.exit(-1);
+
+      } else {
+        System.out.println(">>> " + TEST_PROPERTIES_PROP + " : <NONE>");
+      }
+    } catch (IOException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  public static void init() {
+  }
+
+}
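
SysPropsForTestsLoader exposes no instance API; callers simply force its static
initializer to run, exactly as the helpers in this commit do. A minimal sketch
of that pattern (the class name below is hypothetical):

package org.apache.hadoop.test;

// Illustration only: forcing the test properties to be loaded before any
// Java System property is read by the test helper.
public class MyTestHelper {

  static {
    // Runs SysPropsForTestsLoader's static initializer, which loads the
    // nearest test.properties file into Java System properties.
    SysPropsForTestsLoader.init();
  }

  public static String getTestProperty(String name, String defaultValue) {
    return System.getProperty(name, defaultValue);
  }
}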

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDir.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDir.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDir.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDir.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * Annotation for {@link HTestCase} subclasses to indicate that the test method
+ * requires a test directory in the local file system.
+ * <p/>
+ * The test directory location can be retrieved using the
+ * {@link TestDirHelper#getTestDir()} method.
+ */
+@Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
+@Target(java.lang.annotation.ElementType.METHOD)
+public @interface TestDir {
+}
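
A sketch of a test method using the annotation, following the pattern of the
test classes added later in this commit (illustration only; the class name is
hypothetical):

package org.apache.hadoop.test;

import java.io.File;

import junit.framework.Assert;
import org.junit.Test;

// Illustration only: a test method that requests a per-test local directory
// via the @TestDir annotation.
public class MyTestDirExample extends HTestCase {

  @Test
  @TestDir
  public void createsFileInTestDir() throws Exception {
    // Only available because the method is annotated with @TestDir.
    File dir = TestDirHelper.getTestDir();
    Assert.assertTrue(new File(dir, "foo.txt").createNewFile());
  }
}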

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.junit.Test;
+import org.junit.rules.MethodRule;
+import org.junit.runners.model.FrameworkMethod;
+import org.junit.runners.model.Statement;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class TestDirHelper implements MethodRule {
+
+  @Test
+  public void dummy() {
+  }
+
+  static {
+    SysPropsForTestsLoader.init();
+  }
+
+  public static final String TEST_DIR_PROP = "test.dir";
+  static String TEST_DIR_ROOT;
+
+  private static void delete(File file) throws IOException {
+    if (file.getAbsolutePath().length() < 5) {
+      throw new IllegalArgumentException(
+        MessageFormat.format("Path [{0}] is too short, not deleting", file.getAbsolutePath()));
+    }
+    if (file.exists()) {
+      if (file.isDirectory()) {
+        File[] children = file.listFiles();
+        if (children != null) {
+          for (File child : children) {
+            delete(child);
+          }
+        }
+      }
+      if (!file.delete()) {
+        throw new RuntimeException(MessageFormat.format("Could not delete path [{0}]", file.getAbsolutePath()));
+      }
+    }
+  }
+
+  static {
+    try {
+      TEST_DIR_ROOT = System.getProperty(TEST_DIR_PROP, new File("target").getAbsolutePath());
+      if (!TEST_DIR_ROOT.startsWith("/")) {
+        System.err.println(MessageFormat.format("System property [{0}]=[{1}] must be set to an absolute path",
+                                                TEST_DIR_PROP, TEST_DIR_ROOT));
+        System.exit(-1);
+      } else if (TEST_DIR_ROOT.length() < 4) {
+        System.err.println(MessageFormat.format("System property [{0}]=[{1}] must be at least 4 chars",
+                                                TEST_DIR_PROP, TEST_DIR_ROOT));
+        System.exit(-1);
+      }
+
+      TEST_DIR_ROOT = new File(TEST_DIR_ROOT, "testdir").getAbsolutePath();
+      System.setProperty(TEST_DIR_PROP, TEST_DIR_ROOT);
+
+      File dir = new File(TEST_DIR_ROOT);
+      delete(dir);
+      if (!dir.mkdirs()) {
+        System.err.println(MessageFormat.format("Could not create test dir [{0}]", TEST_DIR_ROOT));
+        System.exit(-1);
+      }
+
+      System.setProperty("test.circus", "true");
+
+      System.out.println(">>> " + TEST_DIR_PROP + "        : " + System.getProperty(TEST_DIR_PROP));
+    } catch (IOException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  private static ThreadLocal<File> TEST_DIR_TL = new InheritableThreadLocal<File>();
+
+  @Override
+  public Statement apply(final Statement statement, final FrameworkMethod frameworkMethod, final Object o) {
+    return new Statement() {
+      @Override
+      public void evaluate() throws Throwable {
+        File testDir = null;
+        TestDir testDirAnnotation = frameworkMethod.getAnnotation(TestDir.class);
+        if (testDirAnnotation != null) {
+          testDir = resetTestCaseDir(frameworkMethod.getName());
+        }
+        try {
+          TEST_DIR_TL.set(testDir);
+          statement.evaluate();
+        } finally {
+          TEST_DIR_TL.remove();
+        }
+      }
+    };
+  }
+
+  /**
+   * Returns the local test directory for the current test; it is only available
+   * when the test method has been annotated with {@link TestDir}.
+   *
+   * @return the test directory for the current test. It is a full/absolute
+   *         <code>File</code>.
+   */
+  public static File getTestDir() {
+    File testDir = TEST_DIR_TL.get();
+    if (testDir == null) {
+      throw new IllegalStateException("This test does not use @TestDir");
+    }
+    return testDir;
+  }
+
+  private static AtomicInteger counter = new AtomicInteger();
+
+  private static File resetTestCaseDir(String testName) {
+    File dir = new File(TEST_DIR_ROOT);
+    dir = new File(dir, testName + "-" + counter.getAndIncrement());
+    dir = dir.getAbsoluteFile();
+    try {
+      delete(dir);
+    } catch (IOException ex) {
+      throw new RuntimeException(MessageFormat.format("Could not delete test dir[{0}], {1}",
+                                                      dir, ex.getMessage()), ex);
+    }
+    if (!dir.mkdirs()) {
+      throw new RuntimeException(MessageFormat.format("Could not create test dir[{0}]", dir));
+    }
+    return dir;
+  }
+
+}
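
TestDirHelper is a JUnit MethodRule; the expectation is that a base test class
such as HTestCase wires it in with @Rule. HTestCase itself is not part of this
section, so the wiring below is a sketch only:

package org.apache.hadoop.test;

import org.junit.Rule;
import org.junit.rules.MethodRule;

// Sketch only: exposing TestDirHelper as a JUnit rule so that @TestDir
// annotated test methods get a fresh local test directory.
public abstract class MyBaseTestCase {

  @Rule
  public MethodRule testDirRule = new TestDirHelper();
}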

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestException.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestException.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestException.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+@Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
+@Target(java.lang.annotation.ElementType.METHOD)
+public @interface TestException {
+  Class<? extends Throwable> exception();
+
+  String msgRegExp() default ".*";
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestExceptionHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestExceptionHelper.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestExceptionHelper.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestExceptionHelper.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import junit.framework.Assert;
+import org.junit.Test;
+import org.junit.rules.MethodRule;
+import org.junit.runners.model.FrameworkMethod;
+import org.junit.runners.model.Statement;
+
+import java.util.regex.Pattern;
+
+public class TestExceptionHelper implements MethodRule {
+
+  @Test
+  public void dummy() {
+  }
+
+  @Override
+  public Statement apply(final Statement statement, final FrameworkMethod frameworkMethod, final Object o) {
+    return new Statement() {
+      @Override
+      public void evaluate() throws Throwable {
+        TestException testExceptionAnnotation = frameworkMethod.getAnnotation(TestException.class);
+        try {
+          statement.evaluate();
+          if (testExceptionAnnotation != null) {
+            Class<? extends Throwable> klass = testExceptionAnnotation.exception();
+            Assert.fail("Expected Exception: " + klass.getSimpleName());
+          }
+        } catch (Throwable ex) {
+          if (testExceptionAnnotation != null) {
+            Class<? extends Throwable> klass = testExceptionAnnotation.exception();
+            if (klass.isInstance(ex)) {
+              String regExp = testExceptionAnnotation.msgRegExp();
+              Pattern pattern = Pattern.compile(regExp);
+              if (!pattern.matcher(ex.getMessage()).find()) {
+                Assert.fail("Expected Exception Message pattern: " + regExp + " got message: " + ex.getMessage());
+              }
+            } else {
+              Assert.fail("Expected Exception: " + klass.getSimpleName() + " got: " + ex.getClass().getSimpleName());
+            }
+          } else {
+            throw ex;
+          }
+        }
+      }
+    };
+  }
+
+}
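
With the rule wired into a test class, a test declares the expected exception
and, optionally, a message pattern through @TestException; the TestHFSTestCase
and TestHTestCase classes added below use exactly this form. A trimmed sketch
(the @Rule wiring is an assumption, the annotation usage matches the tests
below):

package org.apache.hadoop.test;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.MethodRule;

// Sketch only: the test passes because a RuntimeException whose message
// matches the ".o." pattern is thrown, as required by @TestException.
public class MyExceptionExample {

  @Rule
  public MethodRule exceptionRule = new TestExceptionHelper();

  @Test
  @TestException(exception = RuntimeException.class, msgRegExp = ".o.")
  public void throwsExpectedException() {
    throw new RuntimeException("foo");
  }
}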

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,187 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.test;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.Test;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.servlet.Context;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+public class TestHFSTestCase extends HFSTestCase {
+
+  @Test(expected = IllegalStateException.class)
+  public void testDirNoAnnotation() throws Exception {
+    TestDirHelper.getTestDir();
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testJettyNoAnnotation() throws Exception {
+    TestJettyHelper.getJettyServer();
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testJettyNoAnnotation2() throws Exception {
+    TestJettyHelper.getJettyURL();
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testHdfsNoAnnotation() throws Exception {
+    TestHdfsHelper.getHdfsConf();
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testHdfsNoAnnotation2() throws Exception {
+    TestHdfsHelper.getHdfsTestDir();
+  }
+
+  @Test
+  @TestDir
+  public void testDirAnnotation() throws Exception {
+    Assert.assertNotNull(TestDirHelper.getTestDir());
+  }
+
+  @Test
+  public void waitFor() {
+    long start = System.currentTimeMillis();
+    long waited = waitFor(1000, new Predicate() {
+      public boolean evaluate() throws Exception {
+        return true;
+      }
+    });
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(waited, 0, 50);
+    Assert.assertEquals(end - start - waited, 0, 50);
+  }
+
+  @Test
+  public void waitForTimeOutRatio1() {
+    setWaitForRatio(1);
+    long start = System.currentTimeMillis();
+    long waited = waitFor(200, new Predicate() {
+      public boolean evaluate() throws Exception {
+        return false;
+      }
+    });
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(waited, -1);
+    Assert.assertEquals(end - start, 200, 50);
+  }
+
+  @Test
+  public void waitForTimeOutRatio2() {
+    setWaitForRatio(2);
+    long start = System.currentTimeMillis();
+    long waited = waitFor(200, new Predicate() {
+      public boolean evaluate() throws Exception {
+        return false;
+      }
+    });
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(waited, -1);
+    Assert.assertEquals(end - start, 200 * getWaitForRatio(), 50 * getWaitForRatio());
+  }
+
+  @Test
+  public void sleepRatio1() {
+    setWaitForRatio(1);
+    long start = System.currentTimeMillis();
+    sleep(100);
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(end - start, 100, 50);
+  }
+
+  @Test
+  public void sleepRatio2() {
+    setWaitForRatio(1);
+    long start = System.currentTimeMillis();
+    sleep(100);
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(end - start, 100 * getWaitForRatio(), 50 * getWaitForRatio());
+  }
+
+  @Test
+  @TestHdfs
+  public void testHadoopFileSystem() throws Exception {
+    Configuration conf = TestHdfsHelper.getHdfsConf();
+    FileSystem fs = FileSystem.get(conf);
+    try {
+      OutputStream os = fs.create(new Path(TestHdfsHelper.getHdfsTestDir(), "foo"));
+      os.write(new byte[]{1});
+      os.close();
+      InputStream is = fs.open(new Path(TestHdfsHelper.getHdfsTestDir(), "foo"));
+      Assert.assertEquals(is.read(), 1);
+      Assert.assertEquals(is.read(), -1);
+      is.close();
+    } finally {
+      fs.close();
+    }
+  }
+
+  public static class MyServlet extends HttpServlet {
+    @Override
+    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+      resp.getWriter().write("foo");
+    }
+  }
+
+  @Test
+  @TestJetty
+  public void testJetty() throws Exception {
+    Context context = new Context();
+    context.setContextPath("/");
+    context.addServlet(MyServlet.class, "/bar");
+    Server server = TestJettyHelper.getJettyServer();
+    server.addHandler(context);
+    server.start();
+    URL url = new URL(TestJettyHelper.getJettyURL(), "/bar");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+    Assert.assertEquals(reader.readLine(), "foo");
+    reader.close();
+  }
+
+  @Test
+  @TestException(exception = RuntimeException.class)
+  public void testException0() {
+    throw new RuntimeException("foo");
+  }
+
+  @Test
+  @TestException(exception = RuntimeException.class, msgRegExp = ".o.")
+  public void testException1() {
+    throw new RuntimeException("foo");
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.test;
+
+import junit.framework.Assert;
+import org.junit.Test;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.servlet.Context;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+public class TestHTestCase extends HTestCase {
+
+  @Test(expected = IllegalStateException.class)
+  public void testDirNoAnnotation() throws Exception {
+    TestDirHelper.getTestDir();
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testJettyNoAnnotation() throws Exception {
+    TestJettyHelper.getJettyServer();
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testJettyNoAnnotation2() throws Exception {
+    TestJettyHelper.getJettyURL();
+  }
+
+  @Test
+  @TestDir
+  public void testDirAnnotation() throws Exception {
+    Assert.assertNotNull(TestDirHelper.getTestDir());
+  }
+
+  @Test
+  public void waitFor() {
+    long start = System.currentTimeMillis();
+    long waited = waitFor(1000, new Predicate() {
+      public boolean evaluate() throws Exception {
+        return true;
+      }
+    });
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(waited, 0, 50);
+    Assert.assertEquals(end - start - waited, 0, 50);
+  }
+
+  @Test
+  public void waitForTimeOutRatio1() {
+    setWaitForRatio(1);
+    long start = System.currentTimeMillis();
+    long waited = waitFor(200, new Predicate() {
+      public boolean evaluate() throws Exception {
+        return false;
+      }
+    });
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(waited, -1);
+    Assert.assertEquals(end - start, 200, 50);
+  }
+
+  @Test
+  public void waitForTimeOutRatio2() {
+    setWaitForRatio(2);
+    long start = System.currentTimeMillis();
+    long waited = waitFor(200, new Predicate() {
+      public boolean evaluate() throws Exception {
+        return false;
+      }
+    });
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(waited, -1);
+    Assert.assertEquals(end - start, 200 * getWaitForRatio(), 50 * getWaitForRatio());
+  }
+
+  @Test
+  public void sleepRatio1() {
+    setWaitForRatio(1);
+    long start = System.currentTimeMillis();
+    sleep(100);
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(end - start, 100, 50);
+  }
+
+  @Test
+  public void sleepRatio2() {
+    setWaitForRatio(1);
+    long start = System.currentTimeMillis();
+    sleep(100);
+    long end = System.currentTimeMillis();
+    Assert.assertEquals(end - start, 100 * getWaitForRatio(), 50 * getWaitForRatio());
+  }
+
+  public static class MyServlet extends HttpServlet {
+    @Override
+    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+      resp.getWriter().write("foo");
+    }
+  }
+
+  @Test
+  @TestJetty
+  public void testJetty() throws Exception {
+    Context context = new Context();
+    context.setContextPath("/");
+    context.addServlet(MyServlet.class, "/bar");
+    Server server = TestJettyHelper.getJettyServer();
+    server.addHandler(context);
+    server.start();
+    URL url = new URL(TestJettyHelper.getJettyURL(), "/bar");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+    Assert.assertEquals(reader.readLine(), "foo");
+    reader.close();
+  }
+
+  @Test
+  @TestException(exception = RuntimeException.class)
+  public void testException0() {
+    throw new RuntimeException("foo");
+  }
+
+  @Test
+  @TestException(exception = RuntimeException.class, msgRegExp = ".o.")
+  public void testException1() {
+    throw new RuntimeException("foo");
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfs.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfs.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfs.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+
+/**
+ * Annotation for {@link HTestCase} subclasses to indicate that the test method
+ * requires a FileSystemAccess cluster.
+ * <p/>
+ * The {@link TestHdfsHelper#getHdfsConf()} method returns a FileSystemAccess <code>Configuration</code>
+ * preconfigured to connect to the FileSystemAccess test minicluster or to an external FileSystemAccess cluster.
+ * <p/>
+ * An HDFS test directory will be created for the test. The HDFS test directory
+ * location can be retrieved using the {@link TestHdfsHelper#getHdfsTestDir()} method.
+ * <p/>
+ * Refer to the {@link HTestCase} class for details on how to use and configure
+ * a FileSystemAccess test minicluster or a real FileSystemAccess cluster for the tests.
+ */
+@Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
+@Target(java.lang.annotation.ElementType.METHOD)
+public @interface TestHdfs {
+}
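
A sketch of a @TestHdfs test, following the testHadoopFileSystem example added
above (illustration only; the class name is hypothetical):

package org.apache.hadoop.test;

import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

// Illustration only: using the preconfigured Configuration and the per-test
// HDFS directory made available by the @TestHdfs annotation.
public class MyHdfsExample extends HFSTestCase {

  @Test
  @TestHdfs
  public void writesToHdfsTestDir() throws Exception {
    Configuration conf = TestHdfsHelper.getHdfsConf();
    FileSystem fs = FileSystem.get(conf);
    try {
      Path file = new Path(TestHdfsHelper.getHdfsTestDir(), "example");
      fs.create(file).close();
      Assert.assertTrue(fs.exists(file));
    } finally {
      fs.close();
    }
  }
}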


