hadoop-common-commits mailing list archives

From: ste...@apache.org
Subject: svn commit: r1526848 [6/8] - in /hadoop/common/branches/branch-2: hadoop-project/ hadoop-tools/ hadoop-tools/hadoop-openstack/ hadoop-tools/hadoop-openstack/src/ hadoop-tools/hadoop-openstack/src/main/ hadoop-tools/hadoop-openstack/src/main/java/ hadoo...
Date: Fri, 27 Sep 2013 11:12:45 GMT
Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftFileSystemBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftFileSystemBaseTest.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftFileSystemBaseTest.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftFileSystemBaseTest.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,400 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.exceptions.SwiftOperationFailedException;
+import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
+import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystemStore;
+import org.apache.hadoop.fs.swift.util.DurationStats;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.List;
+
+import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.assertPathExists;
+import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.cleanupInTeardown;
+import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.getServiceURI;
+import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.noteAction;
+
+/**
+ * This is the base class for most of the Swift tests
+ */
+public class SwiftFileSystemBaseTest extends Assert implements
+                                                    SwiftTestConstants {
+
+  protected static final Log LOG =
+          LogFactory.getLog(SwiftFileSystemBaseTest.class);
+  protected SwiftNativeFileSystem fs;
+  protected static SwiftNativeFileSystem lastFs;
+  protected byte[] data = SwiftTestUtils.dataset(getBlockSize() * 2, 0, 255);
+  private Configuration conf;
+
+  @Before
+  public void setUp() throws Exception {
+    noteAction("setup");
+    final URI uri = getFilesystemURI();
+    conf = createConfiguration();
+
+    fs = createSwiftFS();
+    try {
+      fs.initialize(uri, conf);
+    } catch (IOException e) {
+      //FS init failed, set it to null so that teardown doesn't
+      //attempt to use it
+      fs = null;
+      throw e;
+    }
+    //remember the last FS
+    lastFs = fs;
+    noteAction("setup complete");
+  }
+
+  /**
+   * Configuration generator. May be overridden to inject
+   * some custom options
+   * @return a configuration with which to create FS instances
+   */
+  protected Configuration createConfiguration() {
+    return new Configuration();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    cleanupInTeardown(fs, "/test");
+  }
+
+  @AfterClass
+  public static void classTearDown() throws Exception {
+    if (lastFs  != null) {
+      List<DurationStats> statistics = lastFs.getOperationStatistics();
+      for (DurationStats stat : statistics) {
+        LOG.info(stat.toString());
+      }
+    }
+  }
+
+  /**
+   * Get the configuration used to set up the FS
+   * @return the configuration
+   */
+  public Configuration getConf() {
+    return conf;
+  }
+
+  /**
+   * Describe the test, combining some logging with details
+   * for people reading the code
+   *
+   * @param description test description
+   */
+  protected void describe(String description) {
+    noteAction(description);
+  }
+
+  protected URI getFilesystemURI() throws URISyntaxException, IOException {
+    return getServiceURI(createConfiguration());
+  }
+
+  protected SwiftNativeFileSystem createSwiftFS() throws IOException {
+    SwiftNativeFileSystem swiftNativeFileSystem =
+      new SwiftNativeFileSystem();
+    return swiftNativeFileSystem;
+  }
+
+  protected int getBlockSize() {
+    return 1024;
+  }
+
+  /**
+   * Is rename supported?
+   * @return true
+   */
+  protected boolean renameSupported() {
+    return true;
+  }
+
+  /**
+   * assume in a test that rename is supported;
+   * skip it if not
+   */
+  protected void assumeRenameSupported() {
+    Assume.assumeTrue(renameSupported());
+  }
+
+  /**
+   * Take an unqualified path, and qualify it w.r.t the
+   * current filesystem
+   * @param pathString source path
+   * @return a qualified path instance
+   */
+  protected Path path(String pathString) {
+    return new Path(pathString).makeQualified(fs);
+  }
+
+  /**
+   * Get the filesystem
+   * @return the current FS
+   */
+  public SwiftNativeFileSystem getFs() {
+    return fs;
+  }
+
+  /**
+   * Create a file using the standard {@link #data} bytes.
+   *
+   * @param path path to write
+   * @throws IOException on any problem
+   */
+  protected void createFile(Path path) throws IOException {
+    createFile(path, data);
+  }
+
+  /**
+   * Create a file with the given data.
+   *
+   * @param path       path to write
+   * @param sourceData source dataset
+   * @throws IOException on any problem
+   */
+  protected void createFile(Path path, byte[] sourceData) throws IOException {
+    FSDataOutputStream out = fs.create(path);
+    out.write(sourceData, 0, sourceData.length);
+    out.close();
+  }
+
+  /**
+   * Create and then close a file
+   * @param path path to create
+   * @throws IOException on a failure
+   */
+  protected void createEmptyFile(Path path) throws IOException {
+    FSDataOutputStream out = fs.create(path);
+    out.close();
+  }
+
+  /**
+   * Get the inner store - useful for lower-level operations
+   *
+   * @return the store
+   */
+  protected SwiftNativeFileSystemStore getStore() {
+    return fs.getStore();
+  }
+
+  /**
+   * Rename a path
+   * @param src source
+   * @param dst dest
+   * @param renameMustSucceed flag to say "this rename must succeed"
+   * @param srcExists add assert that the source exists afterwards
+   * @param dstExists add assert the dest exists afterwards
+   * @throws IOException IO trouble
+   */
+  protected void rename(Path src, Path dst, boolean renameMustSucceed,
+                        boolean srcExists, boolean dstExists) throws IOException {
+    if (renameMustSucceed) {
+      renameToSuccess(src, dst, srcExists, dstExists);
+    } else {
+      renameToFailure(src, dst);
+    }
+  }
+
+  /**
+   * Get a string describing the outcome of a rename, by listing the dest
+   * path and its parent along with some covering text
+   * @param src source path
+   * @param dst dest path
+   * @return a string for logs and exceptions
+   * @throws IOException IO problems
+   */
+  private String getRenameOutcome(Path src, Path dst) throws IOException {
+    String lsDst = ls(dst);
+    Path parent = dst.getParent();
+    String lsParent = parent != null ? ls(parent) : "";
+    return "  result of " + src + " => " + dst
+            + " - " + lsDst
+            + " \n" + lsParent;
+  }
+
+  /**
+   * Rename, expecting an exception to be thrown
+   *
+   * @param src source
+   * @param dst dest
+   * @throws IOException a failure other than an
+   *                     expected SwiftRenameException or FileNotFoundException
+   */
+  protected void renameToFailure(Path src, Path dst) throws IOException {
+    try {
+      getStore().rename(src, dst);
+      fail("Expected failure renaming " + src + " to " + dst
+              + "- but got success");
+    } catch (SwiftOperationFailedException e) {
+      LOG.debug("Rename failed (expected):" + e);
+    } catch (FileNotFoundException e) {
+      LOG.debug("Rename failed (expected):" + e);
+    }
+  }
+
+  /**
+   * Rename to success
+   *
+   * @param src source
+   * @param dst dest
+   * @param srcExists add assert that the source exists afterwards
+   * @param dstExists add assert the dest exists afterwards
+   * @throws SwiftOperationFailedException operation failure
+   * @throws IOException IO problems
+   */
+  protected void renameToSuccess(Path src, Path dst,
+                                 boolean srcExists, boolean dstExists)
+      throws SwiftOperationFailedException, IOException {
+    getStore().rename(src, dst);
+    String outcome = getRenameOutcome(src, dst);
+    assertEquals("Source " + src + "exists: " + outcome,
+            srcExists, fs.exists(src));
+    assertEquals("Destination " + dstExists + " exists" + outcome,
+            dstExists, fs.exists(dst));
+  }
+
+  /**
+   * List a path in the test FS
+   * @param path path to list
+   * @return the contents of the path/dir
+   * @throws IOException IO problems
+   */
+  protected String ls(Path path) throws IOException {
+    return SwiftTestUtils.ls(fs, path);
+  }
+
+  /**
+   * assert that a path exists
+   * @param message message to use in an assertion
+   * @param path path to probe
+   * @throws IOException IO problems
+   */
+  public void assertExists(String message, Path path) throws IOException {
+    assertPathExists(fs, message, path);
+  }
+
+  /**
+   * assert that a path does not exist
+   * @param message message to use in an assertion
+   * @param path path to probe
+   * @throws IOException IO problems
+   */
+  public void assertPathDoesNotExist(String message, Path path) throws
+          IOException {
+    SwiftTestUtils.assertPathDoesNotExist(fs, message, path);
+  }
+
+  /**
+   * Assert that a file exists and that its {@link FileStatus} entry
+   * declares it to be a file and not a symlink or directory.
+   *
+   * @param filename name of the file
+   * @throws IOException IO problems during file operations
+   */
+  protected void assertIsFile(Path filename) throws IOException {
+    SwiftTestUtils.assertIsFile(fs, filename);
+  }
+
+  /**
+   * Create a directory, asserting that the mkdirs operation succeeded.
+   *
+   * @param path directory to create
+   * @throws IOException IO problems during file operations
+   */
+  protected void mkdirs(Path path) throws IOException {
+    assertTrue("Failed to mkdir" + path, fs.mkdirs(path));
+  }
+
+  /**
+   * Assert that a delete succeeded
+   * @param path path to delete
+   * @param recursive recursive flag
+   * @throws IOException IO problems
+   */
+  protected void assertDeleted(Path path, boolean recursive) throws IOException {
+    SwiftTestUtils.assertDeleted(fs, path, recursive);
+  }
+
+  /**
+   * Assert that a value is not equal to the expected value
+   * @param message message if the two values are equal
+   * @param expected expected value
+   * @param actual actual value
+   */
+  protected void assertNotEqual(String message, int expected, int actual) {
+    assertTrue(message,
+               actual != expected);
+  }
+
+  /**
+   * Get the number of partitions written from the Swift Native FS APIs
+   * @param out output stream
+   * @return the number of partitioned files written by the stream
+   */
+  protected int getPartitionsWritten(FSDataOutputStream out) {
+    return SwiftNativeFileSystem.getPartitionsWritten(out);
+  }
+
+  /**
+   * Assert that the no. of partitions written matches expectations
+   * @param action operation (for use in the assertions)
+   * @param out output stream
+   * @param expected expected no. of partitions
+   */
+  protected void assertPartitionsWritten(String action, FSDataOutputStream out,
+                                         long expected) {
+    OutputStream nativeStream = out.getWrappedStream();
+    int written = getPartitionsWritten(out);
+    if (written != expected) {
+      Assert.fail(action + ": " +
+                  TestSwiftFileSystemPartitionedUploads.WRONG_PARTITION_COUNT
+                  + " expected: " + expected + " actual: " + written
+                  + " -- " + nativeStream);
+    }
+  }
+
+  /**
+   * Assert that the result value == -1, which implies
+   * that the read reached the end of the file
+   * @param text text to include in a message (usually the operation)
+   * @param result read result to validate
+   */
+  protected void assertMinusOne(String text, int result) {
+    assertEquals(text + " wrong read result " + result, -1, result);
+  }
+}

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftTestConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftTestConstants.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftTestConstants.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftTestConstants.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+/**
+ * Hard-coded constants for the test timeouts
+ */
+public interface SwiftTestConstants {
+  /**
+   * Timeout for swift tests: {@value}
+   */
+  int SWIFT_TEST_TIMEOUT = 5 * 60 * 1000;
+
+  /**
+   * Timeout for tests performing bulk operations: {@value}
+   */
+  int SWIFT_BULK_IO_TEST_TIMEOUT = 12 * 60 * 1000;
+}
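
For context, a minimal sketch (not part of this commit; the class and method
names are illustrative only) of how a concrete test is expected to combine the
SwiftFileSystemBaseTest helpers with these timeout constants, mirroring the
pattern the test classes later in this patch follow:

    package org.apache.hadoop.fs.swift;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.swift.http.SwiftProtocolConstants;
    import org.junit.Test;

    // Illustrative only -- not part of r1526848.
    public class TestSwiftExampleUsage extends SwiftFileSystemBaseTest {

      @Override
      protected Configuration createConfiguration() {
        // inject a custom option, as TestReadPastBuffer and TestSeek do below
        Configuration conf = super.createConfiguration();
        conf.set(SwiftProtocolConstants.SWIFT_REQUEST_SIZE, "1");
        return conf;
      }

      @Test(timeout = SWIFT_TEST_TIMEOUT)
      public void testCreateAndDelete() throws Throwable {
        Path file = path("/test/exampleUsage");
        createFile(file);            // writes the base class's dataset
        assertExists("created file", file);
        assertDeleted(file, false);  // delete, then assert it is gone
      }
    }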

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestFSMainOperationsSwift.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestFSMainOperationsSwift.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestFSMainOperationsSwift.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestFSMainOperationsSwift.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,366 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.fs.swift;
+
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSMainOperationsBaseTest;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.http.SwiftProtocolConstants;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import static org.apache.hadoop.fs.swift.SwiftTestConstants.SWIFT_TEST_TIMEOUT;
+import java.io.IOException;
+import java.net.URI;
+
+public class TestFSMainOperationsSwift extends FSMainOperationsBaseTest {
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    Configuration conf = new Configuration();
+    //small blocksize for faster remote tests
+    conf.setInt(SwiftProtocolConstants.SWIFT_BLOCKSIZE, 2);
+    URI serviceURI = SwiftTestUtils.getServiceURI(conf);
+    fSys = FileSystem.get(serviceURI, conf);
+    super.setUp();
+  }
+
+  private Path wd = null;
+
+  @Override
+  protected FileSystem createFileSystem() throws Exception {
+    return fSys;
+  }
+
+  @Override
+  protected Path getDefaultWorkingDirectory() throws IOException {
+    if (wd == null) {
+      wd = fSys.getWorkingDirectory();
+    }
+    return wd;
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testWDAbsolute() throws IOException {
+    Path absoluteDir = getTestRootPath(fSys, "test/existingDir");
+    fSys.mkdirs(absoluteDir);
+    fSys.setWorkingDirectory(absoluteDir);
+    Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testListStatusThrowsExceptionForUnreadableDir() {
+    SwiftTestUtils.skip("unsupported");
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testFsStatus() throws Exception {
+    super.testFsStatus();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testWorkingDirectory() throws Exception {
+    super.testWorkingDirectory();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testMkdirs() throws Exception {
+    super.testMkdirs();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
+    super.testMkdirsFailsForSubdirectoryOfExistingFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGetFileStatusThrowsExceptionForNonExistentFile() throws
+                                                                   Exception {
+    super.testGetFileStatusThrowsExceptionForNonExistentFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testListStatusThrowsExceptionForNonExistentFile() throws
+                                                                Exception {
+    super.testListStatusThrowsExceptionForNonExistentFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testListStatus() throws Exception {
+    super.testListStatus();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testListStatusFilterWithNoMatches() throws Exception {
+    super.testListStatusFilterWithNoMatches();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testListStatusFilterWithSomeMatches() throws Exception {
+    super.testListStatusFilterWithSomeMatches();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusNonExistentFile() throws Exception {
+    super.testGlobStatusNonExistentFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusWithNoMatchesInPath() throws Exception {
+    super.testGlobStatusWithNoMatchesInPath();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusSomeMatchesInDirectories() throws Exception {
+    super.testGlobStatusSomeMatchesInDirectories();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusWithMultipleWildCardMatches() throws Exception {
+    super.testGlobStatusWithMultipleWildCardMatches();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusWithMultipleMatchesOfSingleChar() throws Exception {
+    super.testGlobStatusWithMultipleMatchesOfSingleChar();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusFilterWithEmptyPathResults() throws Exception {
+    super.testGlobStatusFilterWithEmptyPathResults();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusFilterWithSomePathMatchesAndTrivialFilter() throws
+                                                                        Exception {
+    super.testGlobStatusFilterWithSomePathMatchesAndTrivialFilter();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusFilterWithMultipleWildCardMatchesAndTrivialFilter() throws
+                                                                                Exception {
+    super.testGlobStatusFilterWithMultipleWildCardMatchesAndTrivialFilter();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusFilterWithMultiplePathMatchesAndNonTrivialFilter() throws
+                                                                               Exception {
+    super.testGlobStatusFilterWithMultiplePathMatchesAndNonTrivialFilter();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusFilterWithNoMatchingPathsAndNonTrivialFilter() throws
+                                                                           Exception {
+    super.testGlobStatusFilterWithNoMatchingPathsAndNonTrivialFilter();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGlobStatusFilterWithMultiplePathWildcardsAndNonTrivialFilter() throws
+                                                                                 Exception {
+    super.testGlobStatusFilterWithMultiplePathWildcardsAndNonTrivialFilter();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testWriteReadAndDeleteEmptyFile() throws Exception {
+    super.testWriteReadAndDeleteEmptyFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testWriteReadAndDeleteHalfABlock() throws Exception {
+    super.testWriteReadAndDeleteHalfABlock();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testWriteReadAndDeleteOneBlock() throws Exception {
+    super.testWriteReadAndDeleteOneBlock();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testWriteReadAndDeleteOneAndAHalfBlocks() throws Exception {
+    super.testWriteReadAndDeleteOneAndAHalfBlocks();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testWriteReadAndDeleteTwoBlocks() throws Exception {
+    super.testWriteReadAndDeleteTwoBlocks();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testOverwrite() throws IOException {
+    super.testOverwrite();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testWriteInNonExistentDirectory() throws IOException {
+    super.testWriteInNonExistentDirectory();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testDeleteNonExistentFile() throws IOException {
+    super.testDeleteNonExistentFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testDeleteRecursively() throws IOException {
+    super.testDeleteRecursively();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testDeleteEmptyDirectory() throws IOException {
+    super.testDeleteEmptyDirectory();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameNonExistentPath() throws Exception {
+    super.testRenameNonExistentPath();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameFileToNonExistentDirectory() throws Exception {
+    super.testRenameFileToNonExistentDirectory();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameFileToDestinationWithParentFile() throws Exception {
+    super.testRenameFileToDestinationWithParentFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameFileToExistingParent() throws Exception {
+    super.testRenameFileToExistingParent();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameFileToItself() throws Exception {
+    super.testRenameFileToItself();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameFileAsExistingFile() throws Exception {
+    super.testRenameFileAsExistingFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameFileAsExistingDirectory() throws Exception {
+    super.testRenameFileAsExistingDirectory();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameDirectoryToItself() throws Exception {
+    super.testRenameDirectoryToItself();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameDirectoryToNonExistentParent() throws Exception {
+    super.testRenameDirectoryToNonExistentParent();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameDirectoryAsNonExistentDirectory() throws Exception {
+    super.testRenameDirectoryAsNonExistentDirectory();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameDirectoryAsEmptyDirectory() throws Exception {
+    super.testRenameDirectoryAsEmptyDirectory();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameDirectoryAsNonEmptyDirectory() throws Exception {
+    super.testRenameDirectoryAsNonEmptyDirectory();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testRenameDirectoryAsFile() throws Exception {
+    super.testRenameDirectoryAsFile();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testInputStreamClosedTwice() throws IOException {
+    super.testInputStreamClosedTwice();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testOutputStreamClosedTwice() throws IOException {
+    super.testOutputStreamClosedTwice();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testGetWrappedInputStream() throws IOException {
+    super.testGetWrappedInputStream();
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  @Override
+  public void testCopyToLocalWithUseRawLocalFileSystemOption() throws
+                                                               Exception {
+    super.testCopyToLocalWithUseRawLocalFileSystemOption();
+  }
+}

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestLogResources.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestLogResources.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestLogResources.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestLogResources.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
+
+import java.net.URL;
+
+/**
+ * This test just debugs which log resources are being picked up
+ */
+public class TestLogResources implements SwiftTestConstants {
+  protected static final Log LOG =
+    LogFactory.getLog(TestLogResources.class);
+
+  private void printf(String format, Object... args) {
+    String msg = String.format(format, args);
+    System.out.println(msg);
+    LOG.info(msg);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testWhichLog4JPropsFile() throws Throwable {
+    locateResource("log4j.properties");
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testWhichLog4JXMLFile() throws Throwable {
+    locateResource("log4j.XML");
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testCommonsLoggingProps() throws Throwable {
+    locateResource("commons-logging.properties");
+  }
+
+  private void locateResource(String resource) {
+    URL url = this.getClass().getClassLoader().getResource(resource);
+    if (url != null) {
+      printf("resource %s is at %s", resource, url);
+    } else {
+      printf("resource %s is not on the classpath", resource);
+    }
+  }
+}

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestReadPastBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestReadPastBuffer.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestReadPastBuffer.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestReadPastBuffer.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.http.SwiftProtocolConstants;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.apache.hadoop.io.IOUtils;
+import org.junit.After;
+import org.junit.Test;
+
+/**
+ * These tests verify the behaviour of reading and seeking past the end of
+ * the read buffer and of the file itself:
+ * <ol>
+ *   <li>Reading the bytes just before the end of the file succeeds;
+ *   reading past the end returns -1.</li>
+ *   <li>Bulk read(buffer) calls past the end of the file also return -1.</li>
+ *   <li>A zero-byte read is range-checked before any EOF check.</li>
+ *   <li>Reading a file byte by byte past the buffer size refreshes the
+ *   buffer correctly.</li>
+ * </ol>
+ * These may seem "obvious", but the more the input streams try to be clever
+ * about offsets and buffering, the more likely it is that reads past the
+ * buffer will get confused.
+ */
+public class TestReadPastBuffer extends SwiftFileSystemBaseTest {
+  protected static final Log LOG =
+    LogFactory.getLog(TestReadPastBuffer.class);
+  public static final int SWIFT_READ_BLOCKSIZE = 4096;
+  public static final int SEEK_FILE_LEN = SWIFT_READ_BLOCKSIZE * 2;
+
+  private Path testPath;
+  private Path readFile;
+  private Path zeroByteFile;
+  private FSDataInputStream instream;
+
+
+  /**
+   * Get a configuration with a small blocksize reported to callers
+   * @return a configuration for this test
+   */
+  @Override
+  public Configuration getConf() {
+    Configuration conf = super.getConf();
+    /*
+     * set to 4KB
+     */
+    conf.setInt(SwiftProtocolConstants.SWIFT_BLOCKSIZE, SWIFT_READ_BLOCKSIZE);
+    return conf;
+  }
+
+  /**
+   * Setup creates the test data under /test
+   *
+   * @throws Exception
+   */
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    byte[] block = SwiftTestUtils.dataset(SEEK_FILE_LEN, 0, 255);
+
+    //set up the test directory and data file
+    testPath = path("/test");
+    readFile = new Path(testPath, "TestReadPastBuffer.txt");
+    createFile(readFile, block);
+  }
+
+  @After
+  public void cleanFile() {
+    IOUtils.closeStream(instream);
+    instream = null;
+  }
+
+  /**
+   * Create a config with a 1KB request size
+   * @return a config
+   */
+  @Override
+  protected Configuration createConfiguration() {
+    Configuration conf = super.createConfiguration();
+    conf.set(SwiftProtocolConstants.SWIFT_REQUEST_SIZE, "1");
+    return conf;
+  }
+
+  /**
+   * Seek past the buffer then read
+   * @throws Throwable problems
+   */
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testSeekAndReadPastEndOfFile() throws Throwable {
+    instream = fs.open(readFile);
+    assertEquals(0, instream.getPos());
+    //go just before the end of the file
+    instream.seek(SEEK_FILE_LEN - 2);
+    assertTrue("Premature EOF", instream.read() != -1);
+    assertTrue("Premature EOF", instream.read() != -1);
+    assertMinusOne("read past end of file", instream.read());
+  }
+
+  /**
+   * Seek past the buffer and attempt a read(buffer)
+   * @throws Throwable failures
+   */
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testSeekBulkReadPastEndOfFile() throws Throwable {
+    instream = fs.open(readFile);
+    assertEquals(0, instream.getPos());
+    //go just before the end
+    instream.seek(SEEK_FILE_LEN - 1);
+    byte[] buffer = new byte[1];
+    int result = instream.read(buffer, 0, 1);
+    //next byte is expected to fail
+    result = instream.read(buffer, 0, 1);
+    assertMinusOne("read past end of file", result);
+    //and this one
+    result = instream.read(buffer, 0, 1);
+    assertMinusOne("read past end of file", result);
+
+    //now do a 0-byte read and expect the range check
+    //to come before any EOF check
+    result = instream.read(buffer, 0, 0);
+    assertEquals("EOF checks coming before read range check", 0, result);
+
+  }
+
+
+
+  /**
+   * Read past the buffer size byte by byte and verify that the buffer
+   * was refreshed; a read after the end of the file must return -1.
+   * @throws Throwable on failure
+   */
+  @Test
+  public void testReadPastBufferSize() throws Throwable {
+    instream = fs.open(readFile);
+
+    while (instream.read() != -1);
+    //here we have gone past the end of a file and its buffer. Now try again
+    assertMinusOne("reading after the (large) file was read: "+ instream,
+                   instream.read());
+  }
+}
+

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSeek.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSeek.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSeek.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSeek.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,260 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.exceptions.SwiftConnectionClosedException;
+import org.apache.hadoop.fs.swift.http.SwiftProtocolConstants;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.apache.hadoop.io.IOUtils;
+import org.junit.After;
+import org.junit.Test;
+
+import java.io.EOFException;
+import java.io.IOException;
+
+/**
+ * Seek tests verify that
+ * <ol>
+ *   <li>When you seek on a 0 byte file to byte (0), it's not an error.</li>
+ *   <li>When you seek past the end of a file, it's an error that should
+ *   raise some form of EOFException.</li>
+ *   <li>When you seek forwards, you get new data.</li>
+ *   <li>When you seek backwards, you get the previous data.</li>
+ *   <li>That this works for big multi-MB files as well as small ones.</li>
+ * </ol>
+ * These may seem "obvious", but the more the input streams try to be clever
+ * about offsets and buffering, the more likely it is that seek() will start
+ * to get confused.
+ */
+public class TestSeek extends SwiftFileSystemBaseTest {
+  protected static final Log LOG =
+    LogFactory.getLog(TestSeek.class);
+  public static final int SMALL_SEEK_FILE_LEN = 256;
+
+  private Path testPath;
+  private Path smallSeekFile;
+  private Path zeroByteFile;
+  private FSDataInputStream instream;
+
+  /**
+   * Setup creates the test data under /test
+   *
+   * @throws Exception
+   */
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    //set up the test directory and files
+    testPath = path("/test");
+    smallSeekFile = new Path(testPath, "seekfile.txt");
+    zeroByteFile = new Path(testPath, "zero.txt");
+    byte[] block = SwiftTestUtils.dataset(SMALL_SEEK_FILE_LEN, 0, 255);
+    //this file now has a simple rule: offset => value
+    createFile(smallSeekFile, block);
+    createEmptyFile(zeroByteFile);
+  }
+
+  @After
+  public void cleanFile() {
+    IOUtils.closeStream(instream);
+    instream = null;
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testSeekZeroByteFile() throws Throwable {
+    instream = fs.open(zeroByteFile);
+    assertEquals(0, instream.getPos());
+    //expect initial read to fail
+    int result = instream.read();
+    assertMinusOne("initial byte read", result);
+    byte[] buffer = new byte[1];
+    //expect that seek to 0 works
+    instream.seek(0);
+    //reread, expect the same -1 result
+    result = instream.read();
+    assertMinusOne("post-seek byte read", result);
+    result = instream.read(buffer, 0, 1);
+    assertMinusOne("post-seek buffer read", result);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testBlockReadZeroByteFile() throws Throwable {
+    instream = fs.open(zeroByteFile);
+    assertEquals(0, instream.getPos());
+    //expect that seek to 0 works
+    byte[] buffer = new byte[1];
+    int result = instream.read(buffer, 0, 1);
+    assertMinusOne("block read zero byte file", result);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testSeekReadClosedFile() throws Throwable {
+    instream = fs.open(smallSeekFile);
+    instream.close();
+    try {
+      instream.seek(0);
+    } catch (SwiftConnectionClosedException e) {
+      //expected a closed file
+    }
+    try {
+      instream.read();
+    } catch (IOException e) {
+      //expected a closed file
+    }
+    try {
+      byte[] buffer = new byte[1];
+      int result = instream.read(buffer, 0, 1);
+    } catch (IOException e) {
+      //expected a closed file
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testNegativeSeek() throws Throwable {
+    instream = fs.open(smallSeekFile);
+    assertEquals(0, instream.getPos());
+    try {
+      instream.seek(-1);
+      long p = instream.getPos();
+      LOG.warn("Seek to -1 returned a position of " + p);
+      int result = instream.read();
+      fail(
+        "expected an exception, got data " + result + " at a position of " + p);
+    } catch (IOException e) {
+      //bad seek -expected
+    }
+    assertEquals(0, instream.getPos());
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testSeekFile() throws Throwable {
+    instream = fs.open(smallSeekFile);
+    assertEquals(0, instream.getPos());
+    //expect that seek to 0 works
+    instream.seek(0);
+    int result = instream.read();
+    assertEquals(0, result);
+    assertEquals(1, instream.read());
+    assertEquals(2, instream.getPos());
+    assertEquals(2, instream.read());
+    assertEquals(3, instream.getPos());
+    instream.seek(128);
+    assertEquals(128, instream.getPos());
+    assertEquals(128, instream.read());
+    instream.seek(63);
+    assertEquals(63, instream.read());
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testSeekAndReadPastEndOfFile() throws Throwable {
+    instream = fs.open(smallSeekFile);
+    assertEquals(0, instream.getPos());
+    //go just before the end of the file
+    instream.seek(SMALL_SEEK_FILE_LEN - 2);
+    assertTrue("Premature EOF", instream.read() != -1);
+    assertTrue("Premature EOF", instream.read() != -1);
+    assertMinusOne("read past end of file", instream.read());
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testSeekAndPastEndOfFileThenReseekAndRead() throws Throwable {
+    instream = fs.open(smallSeekFile);
+    //seek to the end of the file. This may or may not fail; the failure
+    //may be delayed until the read
+    try {
+      instream.seek(SMALL_SEEK_FILE_LEN);
+      //if this doesn't trigger, then read() is expected to fail
+      assertMinusOne("read after seeking past EOF", instream.read());
+    } catch (EOFException expected) {
+      //here an exception was raised in seek
+    }
+    instream.seek(1);
+    assertTrue("Premature EOF", instream.read() != -1);
+  }
+
+  @Override
+  protected Configuration createConfiguration() {
+    Configuration conf = super.createConfiguration();
+    conf.set(SwiftProtocolConstants.SWIFT_REQUEST_SIZE, "1");
+    return conf;
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testSeekBigFile() throws Throwable {
+    Path testSeekFile = new Path(testPath, "bigseekfile.txt");
+    byte[] block = SwiftTestUtils.dataset(65536, 0, 255);
+    createFile(testSeekFile, block);
+    instream = fs.open(testSeekFile);
+    assertEquals(0, instream.getPos());
+    //expect that seek to 0 works
+    instream.seek(0);
+    int result = instream.read();
+    assertEquals(0, result);
+    assertEquals(1, instream.read());
+    assertEquals(2, instream.read());
+
+    //do seek 32KB ahead
+    instream.seek(32768);
+    assertEquals("@32768", block[32768], (byte) instream.read());
+    instream.seek(40000);
+    assertEquals("@40000", block[40000], (byte) instream.read());
+    instream.seek(8191);
+    assertEquals("@8191", block[8191], (byte) instream.read());
+    instream.seek(0);
+    assertEquals("@0", 0, (byte) instream.read());
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testPositionedBulkReadDoesntChangePosition() throws Throwable {
+    Path testSeekFile = new Path(testPath, "bigseekfile.txt");
+    byte[] block = SwiftTestUtils.dataset(65536, 0, 255);
+    createFile(testSeekFile, block);
+    instream = fs.open(testSeekFile);
+    instream.seek(39999);
+    assertTrue(-1 != instream.read());
+    assertEquals(40000, instream.getPos());
+
+    byte[] readBuffer = new byte[256];
+    instream.read(128, readBuffer, 0, readBuffer.length);
+    //the stream position must not have changed
+    assertEquals(40000, instream.getPos());
+    //content is the same too
+    assertEquals("@40000", block[40000], (byte) instream.read());
+    //now verify the picked up data
+    for (int i = 0; i < 256; i++) {
+      assertEquals("@" + i, block[i + 128], readBuffer[i]);
+    }
+  }
+
+  /**
+   * work out the expected byte from a specific offset
+   * @param offset offset in the file
+   * @return the value
+   */
+  int expectedByte(int offset) {
+    return offset & 0xff;
+  }
+}
+

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftConfig.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftConfig.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftConfig.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,194 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.swift;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.swift.http.SwiftRestClient;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.DOT_AUTH_URL;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.DOT_LOCATION_AWARE;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.DOT_PASSWORD;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.DOT_TENANT;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.DOT_USERNAME;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.SWIFT_BLOCKSIZE;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.SWIFT_CONNECTION_TIMEOUT;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.SWIFT_PARTITION_SIZE;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.SWIFT_PROXY_HOST_PROPERTY;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.SWIFT_PROXY_PORT_PROPERTY;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.SWIFT_RETRY_COUNT;
+import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.SWIFT_SERVICE_PREFIX;
+
+/**
+ * Test the swift service-specific configuration binding features
+ */
+public class TestSwiftConfig extends Assert {
+
+
+  public static final String SERVICE = "openstack";
+
+  @Test(expected = org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException.class)
+  public void testEmptyUrl() throws Exception {
+    final Configuration configuration = new Configuration();
+
+    set(configuration, DOT_TENANT, "tenant");
+    set(configuration, DOT_USERNAME, "username");
+    set(configuration, DOT_PASSWORD, "password");
+    mkInstance(configuration);
+  }
+
+  @Test
+  public void testEmptyTenant() throws Exception {
+    final Configuration configuration = new Configuration();
+    set(configuration, DOT_AUTH_URL, "http://localhost:8080");
+    set(configuration, DOT_USERNAME, "username");
+    set(configuration, DOT_PASSWORD, "password");
+    mkInstance(configuration);
+  }
+
+  @Test(expected = org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException.class)
+  public void testEmptyUsername() throws Exception {
+    final Configuration configuration = new Configuration();
+    set(configuration, DOT_AUTH_URL, "http://localhost:8080");
+    set(configuration, DOT_TENANT, "tenant");
+    set(configuration, DOT_PASSWORD, "password");
+    mkInstance(configuration);
+  }
+
+  @Test(expected = org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException.class)
+  public void testEmptyPassword() throws Exception {
+    final Configuration configuration = new Configuration();
+    set(configuration, DOT_AUTH_URL, "http://localhost:8080");
+    set(configuration, DOT_TENANT, "tenant");
+    set(configuration, DOT_USERNAME, "username");
+    mkInstance(configuration);
+  }
+
+  @Test
+  public void testGoodRetryCount() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    configuration.set(SWIFT_RETRY_COUNT, "3");
+    mkInstance(configuration);
+  }
+
+  @Test(expected = org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException.class)
+  public void testBadRetryCount() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    configuration.set(SWIFT_RETRY_COUNT, "three");
+    mkInstance(configuration);
+  }
+
+  @Test(expected = org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException.class)
+  public void testBadConnectTimeout() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    configuration.set(SWIFT_CONNECTION_TIMEOUT, "three");
+    mkInstance(configuration);
+  }
+
+  @Test(expected = org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException.class)
+  public void testZeroBlocksize() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    configuration.set(SWIFT_BLOCKSIZE, "0");
+    mkInstance(configuration);
+  }
+
+  @Test(expected = org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException.class)
+  public void testNegativeBlocksize() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    configuration.set(SWIFT_BLOCKSIZE, "-1");
+    mkInstance(configuration);
+  }
+
+  @Test
+  public void testPositiveBlocksize() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    int size = 127;
+    configuration.set(SWIFT_BLOCKSIZE, Integer.toString(size));
+    SwiftRestClient restClient = mkInstance(configuration);
+    assertEquals(size, restClient.getBlocksizeKB());
+  }
+
+  @Test
+  public void testLocationAwareTruePropagates() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    set(configuration, DOT_LOCATION_AWARE, "true");
+    SwiftRestClient restClient = mkInstance(configuration);
+    assertTrue(restClient.isLocationAware());
+  }
+
+  @Test
+  public void testLocationAwareFalsePropagates() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    set(configuration, DOT_LOCATION_AWARE, "false");
+    SwiftRestClient restClient = mkInstance(configuration);
+    assertFalse(restClient.isLocationAware());
+  }
+
+  @Test(expected = org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException.class)
+  public void testNegativePartsize() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    configuration.set(SWIFT_PARTITION_SIZE, "-1");
+    SwiftRestClient restClient = mkInstance(configuration);
+  }
+
+  @Test
+  public void testPositivePartsize() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    int size = 127;
+    configuration.set(SWIFT_PARTITION_SIZE, Integer.toString(size));
+    SwiftRestClient restClient = mkInstance(configuration);
+    assertEquals(size, restClient.getPartSizeKB());
+  }
+
+  @Test
+  public void testProxyData() throws Exception {
+    final Configuration configuration = createCoreConfig();
+    String proxy="web-proxy";
+    int port = 8088;
+    configuration.set(SWIFT_PROXY_HOST_PROPERTY, proxy);
+    configuration.set(SWIFT_PROXY_PORT_PROPERTY, Integer.toString(port));
+    SwiftRestClient restClient = mkInstance(configuration);
+    assertEquals(proxy, restClient.getProxyHost());
+    assertEquals(port, restClient.getProxyPort());
+  }
+
+  private Configuration createCoreConfig() {
+    final Configuration configuration = new Configuration();
+    set(configuration, DOT_AUTH_URL, "http://localhost:8080");
+    set(configuration, DOT_TENANT, "tenant");
+    set(configuration, DOT_USERNAME, "username");
+    set(configuration, DOT_PASSWORD, "password");
+    return configuration;
+  }
+
+  private void set(Configuration configuration, String field, String value) {
+    configuration.set(SWIFT_SERVICE_PREFIX + SERVICE + field, value);
+  }
+
+  private SwiftRestClient mkInstance(Configuration configuration) throws
+          IOException,
+          URISyntaxException {
+    URI uri = new URI("swift://container.openstack/");
+    return SwiftRestClient.getInstance(uri, configuration);
+  }
+}
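
A note on the binding pattern TestSwiftConfig exercises: the set() helper
prefixes every field with SWIFT_SERVICE_PREFIX plus the service name, so the
tests configure a per-service block of properties and then select it through
the "openstack" host component of the filesystem URI. A hedged sketch of the
equivalent direct configuration follows; the expanded key names assume
SWIFT_SERVICE_PREFIX is "fs.swift.service." and the DOT_* constants are dotted
suffixes such as ".auth.url" (check SwiftProtocolConstants for the
authoritative values):

    import org.apache.hadoop.conf.Configuration;

    // Illustrative only -- not part of r1526848.
    public class SwiftConfigKeysExample {
      public static Configuration exampleBinding() {
        Configuration conf = new Configuration();
        // what set(configuration, DOT_AUTH_URL, "http://localhost:8080")
        // expands to when SERVICE = "openstack"
        conf.set("fs.swift.service.openstack.auth.url", "http://localhost:8080");
        conf.set("fs.swift.service.openstack.tenant", "tenant");
        conf.set("fs.swift.service.openstack.username", "username");
        conf.set("fs.swift.service.openstack.password", "password");
        // mkInstance() then binds to this service definition via the URI
        // swift://container.openstack/
        return conf;
      }
    }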

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBasicOps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBasicOps.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBasicOps.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBasicOps.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,289 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.exceptions.SwiftBadRequestException;
+import org.apache.hadoop.fs.swift.exceptions.SwiftNotDirectoryException;
+import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.junit.Test;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.assertFileHasLength;
+import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.assertIsDirectory;
+import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.readBytesToString;
+import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.writeTextFile;
+
+
+/**
+ * Test basic filesystem operations.
+ * Many of these are similar to those in {@link TestSwiftFileSystemContract}
+ * -this is the JUnit4 suite that was used to test the Swift component
+ * during its initial development. Once written, there is no reason not to retain these tests.
+ */
+public class TestSwiftFileSystemBasicOps extends SwiftFileSystemBaseTest {
+
+  private static final Log LOG =
+          LogFactory.getLog(TestSwiftFileSystemBasicOps.class);
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLsRoot() throws Throwable {
+    Path path = new Path("/");
+    FileStatus[] statuses = fs.listStatus(path);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testMkDir() throws Throwable {
+    Path path = new Path("/test/MkDir");
+    fs.mkdirs(path);
+    //mkdirs succeeded, so clean up with a recursive delete
+    fs.delete(path, true);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDeleteNonexistentFile() throws Throwable {
+    Path path = new Path("/test/DeleteNonexistentFile");
+    assertFalse("delete returned true", fs.delete(path, false));
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testPutFile() throws Throwable {
+    Path path = new Path("/test/PutFile");
+    writeTextFile(fs, path, "Testing a put to a file", false);
+    assertDeleted(path, false);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testPutGetFile() throws Throwable {
+    Path path = new Path("/test/PutGetFile");
+    try {
+      String text = "Testing a put and get to a file "
+              + System.currentTimeMillis();
+      writeTextFile(fs, path, text, false);
+
+      String result = readBytesToString(fs, path, text.length());
+      assertEquals(text, result);
+    } finally {
+      delete(fs, path);
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testPutDeleteFileInSubdir() throws Throwable {
+    Path path =
+            new Path("/test/PutDeleteFileInSubdir/testPutDeleteFileInSubdir");
+    String text = "Testing a put and get to a file in a subdir "
+            + System.currentTimeMillis();
+    writeTextFile(fs, path, text, false);
+    assertDeleted(path, false);
+    //now delete the parent that should have no children
+    assertDeleted(new Path("/test/PutDeleteFileInSubdir"), false);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testRecursiveDelete() throws Throwable {
+    Path childpath =
+            new Path("/test/testRecursiveDelete");
+    String text = "Testing a put and get to a file in a subdir "
+            + System.currentTimeMillis();
+    writeTextFile(fs, childpath, text, false);
+    //now recursively delete the parent, which still has a child underneath it
+    assertDeleted(new Path("/test"), true);
+    assertFalse("child entry still present " + childpath, fs.exists(childpath));
+  }
+
+  private void delete(SwiftNativeFileSystem fs, Path path) {
+    try {
+      if (!fs.delete(path, false)) {
+        LOG.warn("Failed to delete " + path);
+      }
+    } catch (IOException e) {
+      LOG.warn("deleting " + path, e);
+    }
+  }
+
+  private void deleteR(SwiftNativeFileSystem fs, Path path) {
+    try {
+      if (!fs.delete(path, true)) {
+        LOG.warn("Failed to delete " + path);
+      }
+    } catch (IOException e) {
+      LOG.warn("deleting " + path, e);
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testOverwrite() throws Throwable {
+    Path path = new Path("/test/Overwrite");
+    try {
+      String text = "Testing a put to a file "
+              + System.currentTimeMillis();
+      writeTextFile(fs, path, text, false);
+      assertFileHasLength(fs, path, text.length());
+      String text2 = "Overwriting a file "
+              + System.currentTimeMillis();
+      writeTextFile(fs, path, text2, true);
+      assertFileHasLength(fs, path, text2.length());
+      String result = readBytesToString(fs, path, text2.length());
+      assertEquals(text2, result);
+    } finally {
+      delete(fs, path);
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testOverwriteDirectory() throws Throwable {
+    Path path = new Path("/test/testOverwriteDirectory");
+    try {
+      fs.mkdirs(path.getParent());
+      String text = "Testing a put to a file "
+              + System.currentTimeMillis();
+      writeTextFile(fs, path, text, false);
+      assertFileHasLength(fs, path, text.length());
+    } finally {
+      delete(fs, path);
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testFileStatus() throws Throwable {
+    Path path = new Path("/test/FileStatus");
+    try {
+      String text = "Testing File Status "
+              + System.currentTimeMillis();
+      writeTextFile(fs, path, text, false);
+      SwiftTestUtils.assertIsFile(fs, path);
+    } finally {
+      delete(fs, path);
+    }
+  }
+
+  /**
+   * Assert that a newly created directory is a directory
+   *
+   * @throws Throwable if not, or if something else failed
+   */
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDirStatus() throws Throwable {
+    Path path = new Path("/test/DirStatus");
+    try {
+      fs.mkdirs(path);
+      assertIsDirectory(fs, path);
+    } finally {
+      delete(fs, path);
+    }
+  }
+
+  /**
+   * Assert that a directory keeps its directory status while a child
+   * file is created underneath it and after that child is deleted.
+   *
+   * @throws Throwable if not, or if something else failed
+   */
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDirStaysADir() throws Throwable {
+    Path path = new Path("/test/dirStaysADir");
+    Path child = new Path(path, "child");
+    try {
+      //create the dir
+      fs.mkdirs(path);
+      //assert the parent has the directory nature
+      assertIsDirectory(fs, path);
+      //create a child file under it
+      writeTextFile(fs, child, "child file", true);
+      //assert the parent has the directory nature
+      assertIsDirectory(fs, path);
+      //now rm the child
+      delete(fs, child);
+    } finally {
+      deleteR(fs, path);
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testCreateMultilevelDir() throws Throwable {
+    Path base = new Path("/test/CreateMultilevelDir");
+    Path path = new Path(base, "1/2/3");
+    fs.mkdirs(path);
+    assertExists("deep multilevel dir not created", path);
+    fs.delete(base, true);
+    assertPathDoesNotExist("Multilevel delete failed", path);
+    assertPathDoesNotExist("Multilevel delete failed", base);
+
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testCreateDirWithFileParent() throws Throwable {
+    Path path = new Path("/test/CreateDirWithFileParent");
+    Path child = new Path(path, "subdir/child");
+    fs.mkdirs(path.getParent());
+    try {
+      //create the parent path as a file
+      writeTextFile(fs, path, "parent", true);
+      try {
+        fs.mkdirs(child);
+      } catch (SwiftNotDirectoryException expected) {
+        LOG.debug("Expected Exception", expected);
+      }
+    } finally {
+      fs.delete(path, true);
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLongObjectNamesForbidden() throws Throwable {
+    StringBuilder buffer = new StringBuilder(1200);
+    buffer.append("/");
+    for (int i = 0; i < (1200 / 4); i++) {
+      buffer.append(String.format("%04x", i));
+    }
+    String pathString = buffer.toString();
+    Path path = new Path(pathString);
+    try {
+      writeTextFile(fs, path, pathString, true);
+      //reaching this point means the over-long name was accepted: clean up, then fail
+      fs.delete(path, false);
+      fail("Managed to create an object with a name of length "
+              + pathString.length());
+    } catch (SwiftBadRequestException e) {
+      //expected
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLsNonExistentFile() throws Exception {
+    try {
+      Path path = new Path("/test/hadoop/file");
+      FileStatus[] statuses = fs.listStatus(path);
+      fail("Should throw FileNotFoundException on " + path
+              + " but got list of length " + statuses.length);
+    } catch (FileNotFoundException fnfe) {
+      // expected
+    }
+  }
+
+}
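
A minimal sketch of the put/get round trip these tests exercise, driven through the generic Hadoop FileSystem API rather than the test helpers. The swift://container.openstack/ URI is an assumption, and the Configuration is expected to already carry the Swift credentials for that service.

    // Sketch: write a small file, read it back, then delete it.
    import java.io.InputStream;
    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    public final class SwiftPutGetSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();   // assumed to contain Swift credentials
        FileSystem fs = FileSystem.get(new URI("swift://container.openstack/"), conf);
        Path path = new Path("/test/PutGetFile");

        // put: write a short text file, no overwrite
        FSDataOutputStream out = fs.create(path, false);
        out.write("Testing a put and get to a file".getBytes("UTF-8"));
        out.close();

        // get: copy the contents back to stdout
        InputStream in = fs.open(path);
        IOUtils.copyBytes(in, System.out, 4096, false);
        in.close();

        fs.delete(path, false);
      }
    }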

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBlockLocation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBlockLocation.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBlockLocation.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBlockLocation.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.http.SwiftProtocolConstants;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.junit.Test;
+
+import java.io.IOException;
+
+/**
+ * Test block location logic.
+ * The endpoint may or may not be location-aware
+ */
+public class TestSwiftFileSystemBlockLocation extends SwiftFileSystemBaseTest {
+
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLocateSingleFileBlocks() throws Throwable {
+    describe("verify that a file returns 1+ blocks");
+    FileStatus fileStatus = createFileAndGetStatus();
+    BlockLocation[] locations =
+      getFs().getFileBlockLocations(fileStatus, 0, 1);
+    assertNotEqual("No block locations supplied for " + fileStatus, 0,
+                   locations.length);
+    for (BlockLocation location : locations) {
+      assertLocationValid(location);
+    }
+  }
+
+  private void assertLocationValid(BlockLocation location) throws
+                                                           IOException {
+    LOG.info(location);
+    String[] hosts = location.getHosts();
+    String[] names = location.getNames();
+    assertNotEqual("No hosts supplied for " + location, 0, hosts.length);
+    //for every host, there's a name.
+    assertEquals("Unequal names and hosts in " + location,
+                 hosts.length, names.length);
+    assertEquals(SwiftProtocolConstants.BLOCK_LOCATION,
+                 location.getNames()[0]);
+    assertEquals(SwiftProtocolConstants.TOPOLOGY_PATH,
+                 location.getTopologyPaths()[0]);
+  }
+
+  private FileStatus createFileAndGetStatus() throws IOException {
+    Path path = path("/test/locatedFile");
+    createFile(path);
+    return fs.getFileStatus(path);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLocateNullStatus() throws Throwable {
+    describe("verify that a null filestatus maps to a null location array");
+    BlockLocation[] locations =
+      getFs().getFileBlockLocations((FileStatus) null, 0, 1);
+    assertNull(locations);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLocateNegativeSeek() throws Throwable {
+    describe("verify that a negative offset is illegal");
+    try {
+      BlockLocation[] locations =
+        getFs().getFileBlockLocations(createFileAndGetStatus(),
+                                      -1,
+                                      1);
+      fail("Expected an exception, got " + locations.length + " locations");
+    } catch (IllegalArgumentException e) {
+      //expected
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLocateNegativeLen() throws Throwable {
+    describe("verify that a negative length is illegal");
+    try {
+      BlockLocation[] locations =
+        getFs().getFileBlockLocations(createFileAndGetStatus(),
+                                      0,
+                                      -1);
+      fail("Expected an exception, got " + locations.length + " locations");
+    } catch (IllegalArgumentException e) {
+      //expected
+    }
+  }
+
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLocateOutOfRangeLen() throws Throwable {
+    describe("overshooting the length is legal, as long as the" +
+             " origin location is valid");
+
+    BlockLocation[] locations =
+      getFs().getFileBlockLocations(createFileAndGetStatus(),
+                                    0,
+                                    data.length + 100);
+    assertNotNull(locations);
+    assertTrue(locations.length > 0);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLocateOutOfRangeSrc() throws Throwable {
+    describe("Seeking out of the file length returns an empty array");
+
+    BlockLocation[] locations =
+      getFs().getFileBlockLocations(createFileAndGetStatus(),
+                                    data.length + 100,
+                                    1);
+    assertEmptyBlockLocations(locations);
+  }
+
+  private void assertEmptyBlockLocations(BlockLocation[] locations) {
+    assertNotNull(locations);
+    if (locations.length != 0) {
+      fail("non-empty locations[] with first entry of " + locations[0]);
+    }
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLocateDirectory() throws Throwable {
+    describe("verify that locating a directory is an error");
+    createFile(path("/test/filename"));
+    FileStatus status = fs.getFileStatus(path("/test"));
+    LOG.info("Filesystem is " + fs + "; target is " + status);
+    SwiftTestUtils.assertIsDirectory(status);
+    BlockLocation[] locations;
+    locations = getFs().getFileBlockLocations(status,
+                                              0,
+                                              1);
+    assertEmptyBlockLocations(locations);
+  }
+
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testLocateRootDirectory() throws Throwable {
+    describe("verify that locating the root directory is an error");
+    FileStatus status = fs.getFileStatus(path("/"));
+    SwiftTestUtils.assertIsDirectory(status);
+    BlockLocation[] locations;
+    locations = getFs().getFileBlockLocations(status,
+                                              0,
+                                              1);
+    assertEmptyBlockLocations(locations);
+  }
+
+
+}
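
The tests above check that getFileBlockLocations() returns synthetic host/name/topology entries for files and an empty array for directories. A short sketch of how a caller typically walks that array, using only the public FileSystem and BlockLocation API; nothing here is Swift-specific.

    // Sketch: enumerate the block locations reported for a file.
    import java.io.IOException;
    import java.util.Arrays;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class BlockLocationSketch {
      public static void printLocations(FileSystem fs, Path file) throws IOException {
        FileStatus status = fs.getFileStatus(file);
        // Ask for locations covering the whole file; Swift fills in synthetic values.
        BlockLocation[] locations = fs.getFileBlockLocations(status, 0, status.getLen());
        for (BlockLocation location : locations) {
          System.out.printf("offset=%d length=%d hosts=%s%n",
              location.getOffset(),
              location.getLength(),
              Arrays.toString(location.getHosts()));
        }
      }
    }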

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBlocksize.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBlocksize.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBlocksize.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBlocksize.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.junit.Test;
+
+/**
+ * Tests that the blocksize of a file is never zero, either in the
+ * filesystem default or in the FileStatus of a queried file.
+ */
+public class TestSwiftFileSystemBlocksize extends SwiftFileSystemBaseTest {
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDefaultBlocksizeNonZero() throws Throwable {
+    assertTrue("Zero default blocksize", 0L != getFs().getDefaultBlockSize());
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDefaultBlocksizeRootPathNonZero() throws Throwable {
+    assertTrue("Zero default blocksize",
+               0L != getFs().getDefaultBlockSize(new Path("/")));
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDefaultBlocksizeOtherPathNonZero() throws Throwable {
+    assertTrue("Zero default blocksize",
+               0L != getFs().getDefaultBlockSize(new Path("/test")));
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testBlocksizeNonZeroForFile() throws Throwable {
+    Path smallfile = new Path("/test/smallfile");
+    SwiftTestUtils.writeTextFile(fs, smallfile, "blocksize", true);
+    createFile(smallfile);
+    FileStatus status = getFs().getFileStatus(smallfile);
+    assertTrue("Zero blocksize in " + status,
+               status.getBlockSize() != 0L);
+    assertTrue("Zero replication in " + status,
+               status.getReplication() != 0L);
+  }
+}
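
These assertions matter because input-split planning divides the file length by its block size, so a zero blocksize would mean a divide-by-zero. A simplified sketch of that arithmetic follows; it is written in the style of FileInputFormat but is not the actual MapReduce implementation.

    // Sketch: a simplified split-count estimate showing why
    // status.getBlockSize() must be non-zero.
    import org.apache.hadoop.fs.FileStatus;

    public final class SplitCountSketch {
      static long estimateSplits(FileStatus status) {
        long blockSize = status.getBlockSize();   // the tests assert this is non-zero
        if (blockSize <= 0) {
          throw new IllegalStateException("Zero block size in " + status);
        }
        // ceiling division: number of block-sized splits needed to cover the file
        return (status.getLen() + blockSize - 1) / blockSize;
      }
    }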

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemConcurrency.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemConcurrency.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemConcurrency.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemConcurrency.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.junit.Test;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Test Swift FS concurrency logic. This is not a very precise test,
+ * because it is hard to generate race conditions consistently.
+ * Consider it "best effort".
+ */
+public class TestSwiftFileSystemConcurrency extends SwiftFileSystemBaseTest {
+  protected static final Log LOG =
+    LogFactory.getLog(TestSwiftFileSystemConcurrency.class);
+  private Exception thread1Ex, thread2Ex;
+  public static final String TEST_RACE_CONDITION_ON_DELETE_DIR =
+    "/test/testraceconditionondirdeletetest";
+
+  /**
+   * Test concurrent filesystem changes: a directory tree is deleted while a file is written into it
+   */
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testRaceConditionOnDirDeleteTest() throws Exception {
+    SwiftTestUtils.skip("Skipping unreliable test");
+
+    final String message = "message";
+    final Path fileToRead = new Path(
+      TEST_RACE_CONDITION_ON_DELETE_DIR +"/files/many-files/file");
+    final ExecutorService executorService = Executors.newFixedThreadPool(2);
+    fs.create(new Path(TEST_RACE_CONDITION_ON_DELETE_DIR +"/file/test/file1"));
+    fs.create(new Path(TEST_RACE_CONDITION_ON_DELETE_DIR + "/documents/doc1"));
+    fs.create(new Path(
+      TEST_RACE_CONDITION_ON_DELETE_DIR + "/pictures/picture"));
+
+
+    executorService.execute(new Runnable() {
+      @Override
+      public void run() {
+        try {
+          assertDeleted(new Path(TEST_RACE_CONDITION_ON_DELETE_DIR), true);
+        } catch (IOException e) {
+          LOG.warn("deletion thread:" + e, e);
+          thread1Ex = e;
+          throw new RuntimeException(e);
+        }
+      }
+    });
+    executorService.execute(new Runnable() {
+      @Override
+      public void run() {
+        try {
+          final FSDataOutputStream outputStream = fs.create(fileToRead);
+          outputStream.write(message.getBytes());
+          outputStream.close();
+        } catch (IOException e) {
+          LOG.warn("writer thread:" + e, e);
+          thread2Ex = e;
+          throw new RuntimeException(e);
+        }
+      }
+    });
+
+    //stop accepting new work, then wait for both tasks to finish
+    executorService.shutdown();
+    executorService.awaitTermination(1, TimeUnit.MINUTES);
+    if (thread1Ex != null) {
+      throw thread1Ex;
+    }
+    if (thread2Ex != null) {
+      throw thread2Ex;
+    }
+    try {
+      fs.open(fileToRead);
+      LOG.info("concurrency test failed to trigger a failure");
+    } catch (FileNotFoundException expected) {
+      //expected: the enclosing directory was deleted underneath the writer
+    }
+
+  }
+}
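
For reference, a sketch of the conventional ExecutorService shutdown sequence the test relies on: submit the work, call shutdown(), then bound the wait with awaitTermination(). This is plain java.util.concurrent usage, not anything Swift-specific.

    // Sketch: run a set of tasks and wait for them with a bounded timeout.
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public final class ExecutorShutdownSketch {
      public static void runAndWait(Runnable... tasks) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(Math.max(1, tasks.length));
        for (Runnable task : tasks) {
          pool.execute(task);
        }
        pool.shutdown();                              // no new work accepted
        if (!pool.awaitTermination(1, TimeUnit.MINUTES)) {
          pool.shutdownNow();                         // interrupt anything still running
        }
      }
    }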

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import junit.framework.AssertionFailedError;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystemContractBaseTest;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.exceptions.SwiftNotDirectoryException;
+import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * This is the full filesystem contract test, which requires the
+ * default configuration to be set up to point to a filesystem.
+ *
+ * Some of the tests override the base class tests -these
+ * are cases where SwiftFS does not implement those features, or
+ * where the behavior of SwiftFS does not match the normal
+ * contract -which usually means that directories and zero-byte
+ * files are being treated as equal.
+ */
+public class TestSwiftFileSystemContract
+        extends FileSystemContractBaseTest {
+  private static final Log LOG =
+          LogFactory.getLog(TestSwiftFileSystemContract.class);
+
+  @Override
+  protected void setUp() throws Exception {
+    final URI uri = getFilesystemURI();
+    final Configuration conf = new Configuration();
+    fs = createSwiftFS();
+    try {
+      fs.initialize(uri, conf);
+    } catch (IOException e) {
+      //FS init failed, set it to null so that teardown doesn't
+      //attempt to use it
+      fs = null;
+      throw e;
+    }
+    super.setUp();
+  }
+
+  protected URI getFilesystemURI() throws URISyntaxException, IOException {
+    return SwiftTestUtils.getServiceURI(new Configuration());
+  }
+
+  protected SwiftNativeFileSystem createSwiftFS() throws IOException {
+    SwiftNativeFileSystem swiftNativeFileSystem =
+            new SwiftNativeFileSystem();
+    return swiftNativeFileSystem;
+  }
+
+  @Override
+  public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
+    Path testDir = path("/test/hadoop");
+    assertFalse(fs.exists(testDir));
+    assertTrue(fs.mkdirs(testDir));
+    assertTrue(fs.exists(testDir));
+
+    Path filepath = path("/test/hadoop/file");
+    SwiftTestUtils.writeTextFile(fs, filepath, "hello, world", false);
+
+    Path testSubDir = new Path(filepath, "subdir");
+    SwiftTestUtils.assertPathDoesNotExist(fs, "subdir before mkdir", testSubDir);
+
+    try {
+      fs.mkdirs(testSubDir);
+      fail("Should throw IOException.");
+    } catch (SwiftNotDirectoryException e) {
+      // expected
+      assertEquals(filepath,e.getPath());
+    }
+    //now verify that the subdir path does not exist
+    SwiftTestUtils.assertPathDoesNotExist(fs, "subdir after mkdir", testSubDir);
+
+    Path testDeepSubDir = path("/test/hadoop/file/deep/sub/dir");
+    try {
+      fs.mkdirs(testDeepSubDir);
+      fail("Should throw IOException.");
+    } catch (SwiftNotDirectoryException e) {
+      // expected
+    }
+    SwiftTestUtils.assertPathDoesNotExist(fs, "testDeepSubDir  after mkdir",
+                                          testDeepSubDir);
+
+  }
+
+  @Override
+  public void testWriteReadAndDeleteEmptyFile() throws Exception {
+    try {
+      super.testWriteReadAndDeleteEmptyFile();
+    } catch (AssertionFailedError e) {
+      SwiftTestUtils.downgrade("empty files get mistaken for directories", e);
+    }
+  }
+
+  @Override
+  public void testMkdirsWithUmask() throws Exception {
+    //unsupported
+  }
+
+  public void testZeroByteFilesAreFiles() throws Exception {
+    //overridden as a no-op: zero-byte files are currently reported as directories
+  }
+}

Added: hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemDelete.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemDelete.java?rev=1526848&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemDelete.java (added)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemDelete.java Fri Sep 27 11:12:42 2013
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
+import org.junit.Test;
+
+import java.io.IOException;
+/**
+ * Test deletion operations
+ */
+public class TestSwiftFileSystemDelete extends SwiftFileSystemBaseTest {
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDeleteEmptyFile() throws IOException {
+    final Path file = new Path("/test/testDeleteEmptyFile");
+    createEmptyFile(file);
+    SwiftTestUtils.noteAction("about to delete");
+    assertDeleted(file, true);
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDeleteEmptyFileTwice() throws IOException {
+    final Path file = new Path("/test/testDeleteEmptyFileTwice");
+    createEmptyFile(file);
+    assertDeleted(file, true);
+    SwiftTestUtils.noteAction("multiple creates, and deletes");
+    assertFalse("Delete returned true", fs.delete(file, false));
+    createEmptyFile(file);
+    assertDeleted(file, true);
+    assertFalse("Delete returned true", fs.delete(file, false));
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDeleteNonEmptyFile() throws IOException {
+    final Path file = new Path("/test/testDeleteNonEmptyFile");
+    createFile(file);
+    assertDeleted(file, true);
+  }
+  
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDeleteNonEmptyFileTwice() throws IOException {
+    final Path file = new Path("/test/testDeleteNonEmptyFileTwice");
+    createFile(file);
+    assertDeleted(file, true);
+    assertFalse("Delete returned true", fs.delete(file, false));
+    createFile(file);
+    assertDeleted(file, true);
+    assertFalse("Delete returned true", fs.delete(file, false));
+  }
+
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testDeleteTestDir() throws IOException {
+    final Path file = new Path("/test/");
+    fs.delete(file, true);
+    assertPathDoesNotExist("Test dir found", file);
+  }
+
+  /**
+   * Test that a recursive delete of the root directory removes any entries
+   * underneath it but leaves the root directory itself in place.
+   * @throws Throwable on failure
+   */
+  @Test(timeout = SWIFT_TEST_TIMEOUT)
+  public void testRmRootDirRecursiveIsForbidden() throws Throwable {
+    Path root = path("/");
+    Path testFile = path("/test");
+    createFile(testFile);
+    assertTrue("rm(/) returned false", fs.delete(root, true));
+    assertExists("Root dir is missing", root);
+    assertPathDoesNotExist("test file not deleted", testFile);
+  }
+
+}
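
The delete-twice tests pin down a useful contract: delete() returns false, rather than throwing, when the path is already gone. A small cleanup helper sketch that relies on exactly that behaviour follows; the helper name is illustrative, not part of the module.

    // Sketch: cleanup that tolerates an already-missing path, per the
    // delete-twice behaviour verified above.
    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class CleanupSketch {
      public static void cleanup(FileSystem fs, Path path) throws IOException {
        if (!fs.delete(path, true)) {
          // false means nothing was deleted -either the path never existed
          // or it had already been removed; both are fine for cleanup.
        }
      }
    }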


