hadoop-mapreduce-commits mailing list archives

From da...@apache.org
Subject svn commit: r1530667 - in /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop: ipc/ mapred/ mapreduce/lib/input/ mapreduce/lib/output/ util/
Date Wed, 09 Oct 2013 15:09:38 GMT
Author: daryn
Date: Wed Oct  9 15:09:37 2013
New Revision: 1530667

URL: http://svn.apache.org/r1530667
Log:
HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules (Ivan A. Veselovsky via daryn)

Added:
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRCJCFileInputFormat.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCReflectionUtils.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCRunJar.java
Removed:
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClient.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestReflectionUtils.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.net.SocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
+import org.apache.hadoop.net.StandardSocketFactory;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * This class checks that RPCs can use specialized socket factories.
+ */
+public class TestMRCJCSocketFactory {
+
+  /**
+   * Check that we can reach a NameNode or ResourceManager using a specific
+   * socket factory.
+   */
+  @Test
+  public void testSocketFactory() throws IOException {
+    // Create a standard mini-cluster
+    Configuration sconf = new Configuration();
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(sconf).numDataNodes(1)
+        .build();
+    final int nameNodePort = cluster.getNameNodePort();
+
+    // Get a reference to its DFS directly
+    FileSystem fs = cluster.getFileSystem();
+    Assert.assertTrue(fs instanceof DistributedFileSystem);
+    DistributedFileSystem directDfs = (DistributedFileSystem) fs;
+
+    Configuration cconf = getCustomSocketConfigs(nameNodePort);
+
+    fs = FileSystem.get(cconf);
+    Assert.assertTrue(fs instanceof DistributedFileSystem);
+    DistributedFileSystem dfs = (DistributedFileSystem) fs;
+
+    JobClient client = null;
+    MiniMRYarnCluster miniMRYarnCluster = null;
+    try {
+      // This will test RPC to the NameNode only.
+      // Could we also test client-to-DataNode connections?
+      Path filePath = new Path("/dir");
+
+      Assert.assertFalse(directDfs.exists(filePath));
+      Assert.assertFalse(dfs.exists(filePath));
+
+      directDfs.mkdirs(filePath);
+      Assert.assertTrue(directDfs.exists(filePath));
+      Assert.assertTrue(dfs.exists(filePath));
+
+      // This will test RPC to a ResourceManager
+      fs = FileSystem.get(sconf);
+      JobConf jobConf = new JobConf();
+      FileSystem.setDefaultUri(jobConf, fs.getUri().toString());
+      miniMRYarnCluster = initAndStartMiniMRYarnCluster(jobConf);
+      JobConf jconf = new JobConf(miniMRYarnCluster.getConfig());
+      jconf.set("hadoop.rpc.socket.factory.class.default",
+                "org.apache.hadoop.ipc.DummySocketFactory");
+      jconf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
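+      // The RM address below is shifted up by 10 because DummySocketFactory
+      // subtracts 10 on connect, routing the call back to the real port.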
+      String rmAddress = jconf.get("yarn.resourcemanager.address");
+      String[] split = rmAddress.split(":");
+      jconf.set("yarn.resourcemanager.address", split[0] + ':'
+          + (Integer.parseInt(split[1]) + 10));
+      client = new JobClient(jconf);
+
+      JobStatus[] jobs = client.jobsToComplete();
+      Assert.assertTrue(jobs.length == 0);
+
+    } finally {
+      closeClient(client);
+      closeDfs(dfs);
+      closeDfs(directDfs);
+      stopMiniMRYarnCluster(miniMRYarnCluster);
+      shutdownDFSCluster(cluster);
+    }
+  }
+
+  private MiniMRYarnCluster initAndStartMiniMRYarnCluster(JobConf jobConf) {
+    MiniMRYarnCluster miniMRYarnCluster;
+    miniMRYarnCluster = new MiniMRYarnCluster(this.getClass().getName(), 1);
+    miniMRYarnCluster.init(jobConf);
+    miniMRYarnCluster.start();
+    return miniMRYarnCluster;
+  }
+
+  private Configuration getCustomSocketConfigs(final int nameNodePort) {
+    // Get another reference via network using a specific socket factory
+    Configuration cconf = new Configuration();
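+    // Advertise the NameNode at port + 10; DummySocketFactory subtracts 10
+    // when connecting, so traffic still reaches the real NameNode port.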
+    FileSystem.setDefaultUri(cconf, String.format("hdfs://localhost:%s/",
+        nameNodePort + 10));
+    cconf.set("hadoop.rpc.socket.factory.class.default",
+        "org.apache.hadoop.ipc.DummySocketFactory");
+    cconf.set("hadoop.rpc.socket.factory.class.ClientProtocol",
+        "org.apache.hadoop.ipc.DummySocketFactory");
+    cconf.set("hadoop.rpc.socket.factory.class.JobSubmissionProtocol",
+        "org.apache.hadoop.ipc.DummySocketFactory");
+    return cconf;
+  }
+
+  private void shutdownDFSCluster(MiniDFSCluster cluster) {
+    try {
+      if (cluster != null)
+        cluster.shutdown();
+
+    } catch (Exception ignored) {
+      // nothing we can do
+      ignored.printStackTrace();
+    }
+  }
+
+  private void stopMiniMRYarnCluster(MiniMRYarnCluster miniMRYarnCluster) {
+    try {
+      if (miniMRYarnCluster != null)
+        miniMRYarnCluster.stop();
+
+    } catch (Exception ignored) {
+      // nothing we can do
+      ignored.printStackTrace();
+    }
+  }
+
+  private void closeDfs(DistributedFileSystem dfs) {
+    try {
+      if (dfs != null)
+        dfs.close();
+
+    } catch (Exception ignored) {
+      // nothing we can do
+      ignored.printStackTrace();
+    }
+  }
+
+  private void closeClient(JobClient client) {
+    try {
+      if (client != null)
+        client.close();
+    } catch (Exception ignored) {
+      // nothing we can do
+      ignored.printStackTrace();
+    }
+  }
+}
+
+/**
+ * Dummy socket factory which shifts TCP ports by subtracting 10 when
+ * establishing a connection.
+ */
+class DummySocketFactory extends StandardSocketFactory {
+  /**
+   * Default empty constructor (for use with the reflection API).
+   */
+  public DummySocketFactory() {
+  }
+
+  @Override
+  public Socket createSocket() throws IOException {
+    return new Socket() {
+      @Override
+      public void connect(SocketAddress addr, int timeout) throws IOException {
+
+        assert (addr instanceof InetSocketAddress);
+        InetSocketAddress iaddr = (InetSocketAddress) addr;
+        SocketAddress newAddr = null;
+        if (iaddr.isUnresolved())
+          newAddr = new InetSocketAddress(iaddr.getHostName(),
+              iaddr.getPort() - 10);
+        else
+          newAddr = new InetSocketAddress(iaddr.getAddress(),
+              iaddr.getPort() - 10);
+        System.out.printf("Test socket: rerouting %s to %s\n", iaddr, newAddr);
+        super.connect(newAddr, timeout);
+      }
+    };
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (!(obj instanceof DummySocketFactory))
+      return false;
+    return true;
+  }
+}
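
A minimal sketch of how a client-side configuration resolves the factory
exercised above, assuming hadoop-common's NetUtils and the test's
DummySocketFactory are on the classpath; this is illustrative, not part of
the commit:

    import javax.net.SocketFactory;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.net.NetUtils;

    public class SocketFactoryLookup {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Same key the test sets; the value is reflectively instantiated.
        conf.set("hadoop.rpc.socket.factory.class.default",
                 "org.apache.hadoop.ipc.DummySocketFactory");
        SocketFactory factory = NetUtils.getDefaultSocketFactory(conf);
        System.out.println("RPC sockets come from: "
            + factory.getClass().getName());
      }
    }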

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,316 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.concurrent.TimeoutException;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.Text;
+
+@SuppressWarnings("deprecation")
+public class TestMRCJCFileInputFormat extends TestCase {
+
+  Configuration conf = new Configuration();
+  MiniDFSCluster dfs = null;
+
+  private MiniDFSCluster newDFSCluster(JobConf conf) throws Exception {
+    return new MiniDFSCluster(conf, 4, true,
+                         new String[]{"/rack0", "/rack0",
+                                      "/rack1", "/rack1"},
+                         new String[]{"host0", "host1",
+                                      "host2", "host3"});
+  }
+
+  public void testLocality() throws Exception {
+    JobConf job = new JobConf(conf);
+    dfs = newDFSCluster(job);
+    FileSystem fs = dfs.getFileSystem();
+    System.out.println("FileSystem " + fs.getUri());
+
+    Path inputDir = new Path("/foo/");
+    String fileName = "part-0000";
+    createInputs(fs, inputDir, fileName);
+
+    // split it using a file input format
+    TextInputFormat.addInputPath(job, inputDir);
+    TextInputFormat inFormat = new TextInputFormat();
+    inFormat.configure(job);
+    InputSplit[] splits = inFormat.getSplits(job, 1);
+    FileStatus fileStatus = fs.getFileStatus(new Path(inputDir, fileName));
+    BlockLocation[] locations =
+      fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
+    System.out.println("Made splits");
+
+    // make sure that each split is a block and the locations match
+    for(int i=0; i < splits.length; ++i) {
+      FileSplit fileSplit = (FileSplit) splits[i];
+      System.out.println("File split: " + fileSplit);
+      for (String h: fileSplit.getLocations()) {
+        System.out.println("Location: " + h);
+      }
+      System.out.println("Block: " + locations[i]);
+      assertEquals(locations[i].getOffset(), fileSplit.getStart());
+      assertEquals(locations[i].getLength(), fileSplit.getLength());
+      String[] blockLocs = locations[i].getHosts();
+      String[] splitLocs = fileSplit.getLocations();
+      assertEquals(2, blockLocs.length);
+      assertEquals(2, splitLocs.length);
+      assertTrue((blockLocs[0].equals(splitLocs[0]) &&
+                  blockLocs[1].equals(splitLocs[1])) ||
+                 (blockLocs[1].equals(splitLocs[0]) &&
+                  blockLocs[0].equals(splitLocs[1])));
+    }
+
+    assertEquals("Expected value of " + FileInputFormat.NUM_INPUT_FILES,
+                 1, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0));
+  }
+
+  private void createInputs(FileSystem fs, Path inDir, String fileName)
+      throws IOException, TimeoutException, InterruptedException {
+    // create a multi-block file on hdfs
+    Path path = new Path(inDir, fileName);
+    final short replication = 2;
+    DataOutputStream out = fs.create(path, true, 4096,
+                                     replication, 512, null);
+    for(int i=0; i < 1000; ++i) {
+      out.writeChars("Hello\n");
+    }
+    out.close();
+    System.out.println("Wrote file");
+    DFSTestUtil.waitReplication(fs, path, replication);
+  }
+
+  public void testNumInputs() throws Exception {
+    JobConf job = new JobConf(conf);
+    dfs = newDFSCluster(job);
+    FileSystem fs = dfs.getFileSystem();
+    System.out.println("FileSystem " + fs.getUri());
+
+    Path inputDir = new Path("/foo/");
+    final int numFiles = 10;
+    String fileNameBase = "part-0000";
+    for (int i=0; i < numFiles; ++i) {
+      createInputs(fs, inputDir, fileNameBase + String.valueOf(i));
+    }
+    createInputs(fs, inputDir, "_meta");
+    createInputs(fs, inputDir, "_temp");
+
+    // split it using a file input format
+    TextInputFormat.addInputPath(job, inputDir);
+    TextInputFormat inFormat = new TextInputFormat();
+    inFormat.configure(job);
+    InputSplit[] splits = inFormat.getSplits(job, 1);
+
+    assertEquals("Expected value of " + FileInputFormat.NUM_INPUT_FILES,
+                 numFiles, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0));
+  }
+  
+  final Path root = new Path("/TestFileInputFormat");
+  final Path file1 = new Path(root, "file1");
+  final Path dir1 = new Path(root, "dir1");
+  final Path file2 = new Path(dir1, "file2");
+
+  static final int BLOCKSIZE = 1024;
+  static final byte[] databuf = new byte[BLOCKSIZE];
+
+  private static final String[] rack1 = new String[] {
+    "/r1"
+  };
+  private static final String[] hosts1 = new String[] {
+    "host1.rack1.com"
+  };
+  
+  private class DummyFileInputFormat extends FileInputFormat<Text, Text> {
+    @Override
+    public RecordReader<Text, Text> getRecordReader(InputSplit split,
+        JobConf job, Reporter reporter) throws IOException {
+      return null;
+    }
+  }
+
+  public void testMultiLevelInput() throws Exception {
+    JobConf job = new JobConf(conf);
+
+    job.setBoolean("dfs.replication.considerLoad", false);
+    dfs = new MiniDFSCluster(job, 1, true, rack1, hosts1);
+    dfs.waitActive();
+
+    String namenode = (dfs.getFileSystem()).getUri().getHost() + ":" +
+                      (dfs.getFileSystem()).getUri().getPort();
+
+    FileSystem fileSys = dfs.getFileSystem();
+    if (!fileSys.mkdirs(dir1)) {
+      throw new IOException("Mkdirs failed to create " + dir1.toString());
+    }
+    writeFile(job, file1, (short)1, 1);
+    writeFile(job, file2, (short)1, 1);
+
+    // split it using a CombinedFile input format
+    DummyFileInputFormat inFormat = new DummyFileInputFormat();
+    inFormat.setInputPaths(job, root);
+
+    // By default, we don't allow multi-level/recursive inputs
+    boolean exceptionThrown = false;
+    try {
+      InputSplit[] splits = inFormat.getSplits(job, 1);
+    } catch (Exception e) {
+      exceptionThrown = true;
+    }
+    assertTrue("Exception should be thrown by default for scanning a "
+        + "directory with directories inside.", exceptionThrown);
+
+    // Enable multi-level/recursive inputs
+    job.setBoolean(FileInputFormat.INPUT_DIR_RECURSIVE, true);
+    InputSplit[] splits = inFormat.getSplits(job, 1);
+    assertEquals(2, splits.length);
+  }
+
+  @SuppressWarnings("rawtypes")
+  public void testLastInputSplitAtSplitBoundary() throws Exception {
+    FileInputFormat fif = new FileInputFormatForTest(1024L * 1024 * 1024,
+        128L * 1024 * 1024);
+    JobConf job = new JobConf();
+    InputSplit[] splits = fif.getSplits(job, 8);
+    assertEquals(8, splits.length);
+    for (int i = 0; i < splits.length; i++) {
+      InputSplit split = splits[i];
+      assertEquals(("host" + i), split.getLocations()[0]);
+    }
+  }
+
+  @SuppressWarnings("rawtypes")
+  public void testLastInputSplitExceedingSplitBoundary() throws Exception {
+    FileInputFormat fif = new FileInputFormatForTest(1027L * 1024 * 1024,
+        128L * 1024 * 1024);
+    JobConf job = new JobConf();
+    InputSplit[] splits = fif.getSplits(job, 8);
+    assertEquals(8, splits.length);
+    for (int i = 0; i < splits.length; i++) {
+      InputSplit split = splits[i];
+      assertEquals(("host" + i), split.getLocations()[0]);
+    }
+  }
+
+  @SuppressWarnings("rawtypes")
+  public void testLastInputSplitSingleSplit() throws Exception {
+    FileInputFormat fif = new FileInputFormatForTest(100L * 1024 * 1024,
+        128L * 1024 * 1024);
+    JobConf job = new JobConf();
+    InputSplit[] splits = fif.getSplits(job, 1);
+    assertEquals(1, splits.length);
+    for (int i = 0; i < splits.length; i++) {
+      InputSplit split = splits[i];
+      assertEquals(("host" + i), split.getLocations()[0]);
+    }
+  }
+
+  private class FileInputFormatForTest<K, V> extends FileInputFormat<K, V> {
+
+    long splitSize;
+    long length;
+
+    FileInputFormatForTest(long length, long splitSize) {
+      this.length = length;
+      this.splitSize = splitSize;
+    }
+
+    @Override
+    public RecordReader<K, V> getRecordReader(InputSplit split, JobConf job,
+        Reporter reporter) throws IOException {
+      return null;
+    }
+
+    @Override
+    protected FileStatus[] listStatus(JobConf job) throws IOException {
+      FileStatus mockFileStatus = mock(FileStatus.class);
+      when(mockFileStatus.getBlockSize()).thenReturn(splitSize);
+      when(mockFileStatus.isDirectory()).thenReturn(false);
+      Path mockPath = mock(Path.class);
+      FileSystem mockFs = mock(FileSystem.class);
+
+      BlockLocation[] blockLocations = mockBlockLocations(length, splitSize);
+      when(mockFs.getFileBlockLocations(mockFileStatus, 0, length)).thenReturn(
+          blockLocations);
+      when(mockPath.getFileSystem(any(Configuration.class))).thenReturn(mockFs);
+
+      when(mockFileStatus.getPath()).thenReturn(mockPath);
+      when(mockFileStatus.getLen()).thenReturn(length);
+
+      FileStatus[] fs = new FileStatus[1];
+      fs[0] = mockFileStatus;
+      return fs;
+    }
+
+    @Override
+    protected long computeSplitSize(long blockSize, long minSize, long maxSize) {
+      return splitSize;
+    }
+
+    private BlockLocation[] mockBlockLocations(long size, long splitSize) {
+      int numLocations = (int) (size / splitSize);
+      if (size % splitSize != 0)
+        numLocations++;
+      BlockLocation[] blockLocations = new BlockLocation[numLocations];
+      for (int i = 0; i < numLocations; i++) {
+        String[] names = new String[] { "b" + i };
+        String[] hosts = new String[] { "host" + i };
+        blockLocations[i] = new BlockLocation(names, hosts, i * splitSize,
+            Math.min(splitSize, size - (splitSize * i)));
+      }
+      return blockLocations;
+    }
+  }
+
+  static void writeFile(Configuration conf, Path name,
+      short replication, int numBlocks)
+      throws IOException, TimeoutException, InterruptedException {
+    FileSystem fileSys = FileSystem.get(conf);
+
+    FSDataOutputStream stm = fileSys.create(name, true,
+                                            conf.getInt("io.file.buffer.size", 4096),
+                                            replication, (long)BLOCKSIZE);
+    for (int i = 0; i < numBlocks; i++) {
+      stm.write(databuf);
+    }
+    stm.close();
+    DFSTestUtil.waitReplication(fileSys, name, replication);
+  }
+
+  @Override
+  public void tearDown() throws Exception {
+    if (dfs != null) {
+      dfs.shutdown();
+      dfs = null;
+    }
+  }
+}
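
The three testLastInputSplit* cases above hinge on FileInputFormat's split
slop: a tail of less than 10% of the split size is merged into the final
split, which is why 1027 MiB of input with 128 MiB splits still yields 8
splits. A self-contained sketch of that counting rule, assuming the
SPLIT_SLOP = 1.1 constant FileInputFormat uses:

    public class SplitCountSketch {
      // Assumed from FileInputFormat's private SPLIT_SLOP constant.
      private static final double SPLIT_SLOP = 1.1;

      static int countSplits(long length, long splitSize) {
        int splits = 0;
        long remaining = length;
        // Carve full splits while more than 1.1 split-sizes remain.
        while (((double) remaining) / splitSize > SPLIT_SLOP) {
          remaining -= splitSize;
          splits++;
        }
        if (remaining != 0) {
          splits++; // the last split absorbs the remainder
        }
        return splits;
      }

      public static void main(String[] args) {
        final long MiB = 1024L * 1024;
        System.out.println(countSplits(1024 * MiB, 128 * MiB)); // 8
        System.out.println(countSplits(1027 * MiB, 128 * MiB)); // 8, 131 MiB tail
        System.out.println(countSplits(100 * MiB, 128 * MiB));  // 1
      }
    }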

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import java.io.*;
+import java.net.URI;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapred.JobContextImpl;
+import org.apache.hadoop.mapred.TaskAttemptContextImpl;
+import org.apache.hadoop.mapreduce.JobStatus;
+
+public class TestMRCJCFileOutputCommitter extends TestCase {
+  private static Path outDir = new Path(
+     System.getProperty("test.build.data", "/tmp"), "output");
+
+  // A random task attempt id for testing.
+  private static String attempt = "attempt_200707121733_0001_m_000000_0";
+  private static TaskAttemptID taskID = TaskAttemptID.forName(attempt);
+  private Text key1 = new Text("key1");
+  private Text key2 = new Text("key2");
+  private Text val1 = new Text("val1");
+  private Text val2 = new Text("val2");
+  
+  @SuppressWarnings("unchecked")
+  private void writeOutput(RecordWriter theRecordWriter, Reporter reporter)
+      throws IOException {
+    NullWritable nullWritable = NullWritable.get();
+
+    try {
+      theRecordWriter.write(key1, val1);
+      theRecordWriter.write(null, nullWritable);
+      theRecordWriter.write(null, val1);
+      theRecordWriter.write(nullWritable, val2);
+      theRecordWriter.write(key2, nullWritable);
+      theRecordWriter.write(key1, null);
+      theRecordWriter.write(null, null);
+      theRecordWriter.write(key2, val2);
+    } finally {
+      theRecordWriter.close(reporter);
+    }
+  }
+  
+  private void setConfForFileOutputCommitter(JobConf job) {
+    job.set(JobContext.TASK_ATTEMPT_ID, attempt);
+    job.setOutputCommitter(FileOutputCommitter.class);
+    FileOutputFormat.setOutputPath(job, outDir);
+  }
+
+  @SuppressWarnings("unchecked")
+  public void testCommitter() throws Exception {
+    JobConf job = new JobConf();
+    setConfForFileOutputCommitter(job);
+    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
+    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
+    FileOutputCommitter committer = new FileOutputCommitter();
+    FileOutputFormat.setWorkOutputPath(job, 
+      committer.getTaskAttemptPath(tContext));
+
+    committer.setupJob(jContext);
+    committer.setupTask(tContext);
+    String file = "test.txt";
+
+    // A reporter that does nothing
+    Reporter reporter = Reporter.NULL;
+    // write output
+    FileSystem localFs = FileSystem.getLocal(job);
+    TextOutputFormat theOutputFormat = new TextOutputFormat();
+    RecordWriter theRecordWriter =
+      theOutputFormat.getRecordWriter(localFs, job, file, reporter);
+    writeOutput(theRecordWriter, reporter);
+
+    // do commit
+    committer.commitTask(tContext);
+    committer.commitJob(jContext);
+    
+    // validate output
+    File expectedFile = new File(new Path(outDir, file).toString());
+    StringBuffer expectedOutput = new StringBuffer();
+    expectedOutput.append(key1).append('\t').append(val1).append("\n");
+    expectedOutput.append(val1).append("\n");
+    expectedOutput.append(val2).append("\n");
+    expectedOutput.append(key2).append("\n");
+    expectedOutput.append(key1).append("\n");
+    expectedOutput.append(key2).append('\t').append(val2).append("\n");
+    String output = UtilsForTests.slurp(expectedFile);
+    assertEquals(expectedOutput.toString(), output);
+
+    FileUtil.fullyDelete(new File(outDir.toString()));
+  }
+
+  public void testAbort() throws IOException {
+    JobConf job = new JobConf();
+    setConfForFileOutputCommitter(job);
+    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
+    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
+    FileOutputCommitter committer = new FileOutputCommitter();
+    FileOutputFormat.setWorkOutputPath(job, committer
+        .getTaskAttemptPath(tContext));
+
+    // do setup
+    committer.setupJob(jContext);
+    committer.setupTask(tContext);
+    String file = "test.txt";
+
+    // A reporter that does nothing
+    Reporter reporter = Reporter.NULL;
+    // write output
+    FileSystem localFs = FileSystem.getLocal(job);
+    TextOutputFormat theOutputFormat = new TextOutputFormat();
+    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
+        job, file, reporter);
+    writeOutput(theRecordWriter, reporter);
+
+    // do abort
+    committer.abortTask(tContext);
+    File expectedFile = new File(new Path(committer
+        .getTaskAttemptPath(tContext), file).toString());
+    assertFalse("task temp dir still exists", expectedFile.exists());
+
+    committer.abortJob(jContext, JobStatus.State.FAILED);
+    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
+        .toString());
+    assertFalse("job temp dir "+expectedFile+" still exists", expectedFile.exists());
+    assertEquals("Output directory not empty", 0, new File(outDir.toString())
+        .listFiles().length);
+    FileUtil.fullyDelete(new File(outDir.toString()));
+  }
+
+  public static class FakeFileSystem extends RawLocalFileSystem {
+    public FakeFileSystem() {
+      super();
+    }
+
+    public URI getUri() {
+      return URI.create("faildel:///");
+    }
+
+    @Override
+    public boolean delete(Path p, boolean recursive) throws IOException {
+      throw new IOException("fake delete failed");
+    }
+  }
+
+  public void testFailAbort() throws IOException {
+    JobConf job = new JobConf();
+    job.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");
+    job.setClass("fs.faildel.impl", FakeFileSystem.class, FileSystem.class);
+    setConfForFileOutputCommitter(job);
+    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
+    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
+    FileOutputCommitter committer = new FileOutputCommitter();
+    FileOutputFormat.setWorkOutputPath(job, committer
+        .getTaskAttemptPath(tContext));
+
+    // do setup
+    committer.setupJob(jContext);
+    committer.setupTask(tContext);
+    
+    String file = "test.txt";
+    File jobTmpDir = new File(committer.getJobAttemptPath(jContext).toUri().getPath());
+    File taskTmpDir = new File(committer.getTaskAttemptPath(tContext).toUri().getPath());
+    File expectedFile = new File(taskTmpDir, file);
+
+    // A reporter that does nothing
+    Reporter reporter = Reporter.NULL;
+    // write output
+    FileSystem localFs = new FakeFileSystem();
+    TextOutputFormat theOutputFormat = new TextOutputFormat();
+    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
+        job, expectedFile.getAbsolutePath(), reporter);
+    writeOutput(theRecordWriter, reporter);
+
+    // do abort
+    Throwable th = null;
+    try {
+      committer.abortTask(tContext);
+    } catch (IOException ie) {
+      th = ie;
+    }
+    assertNotNull(th);
+    assertTrue(th instanceof IOException);
+    assertTrue(th.getMessage().contains("fake delete failed"));
+    assertTrue(expectedFile + " does not exist", expectedFile.exists());
+
+    th = null;
+    try {
+      committer.abortJob(jContext, JobStatus.State.FAILED);
+    } catch (IOException ie) {
+      th = ie;
+    }
+    assertNotNull(th);
+    assertTrue(th instanceof IOException);
+    assertTrue(th.getMessage().contains("fake delete failed"));
+    assertTrue("job temp dir does not exists", jobTmpDir.exists());
+  }
+}
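
testFailAbort above relies on registering FakeFileSystem under a private
"faildel" scheme via the fs.<scheme>.impl configuration key. A minimal
standalone sketch of that registration trick, where the "demo" scheme and
class names are illustrative assumptions:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.RawLocalFileSystem;

    public class CustomSchemeDemo {
      public static class DemoFileSystem extends RawLocalFileSystem {
        @Override
        public URI getUri() { return URI.create("demo:///"); }
      }

      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Map the "demo" scheme to our class, as the test does for "faildel".
        conf.setClass("fs.demo.impl", DemoFileSystem.class, FileSystem.class);
        FileSystem fs = FileSystem.get(URI.create("demo:///"), conf);
        System.out.println(fs.getClass().getName()); // ...DemoFileSystem
      }
    }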

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.TestMRJobClient;
+import org.apache.hadoop.mapreduce.tools.CLI;
+import org.apache.hadoop.util.Tool;
+import org.junit.Ignore;
+
+@Ignore
+public class TestMRCJCJobClient extends TestMRJobClient {
+  
+  private String runJob() throws Exception {
+    OutputStream os = getFileSystem().create(new Path(getInputDir(),
+                        "text.txt"));
+    Writer wr = new OutputStreamWriter(os);
+    wr.write("hello1\n");
+    wr.write("hello2\n");
+    wr.write("hello3\n");
+    wr.close();
+
+    JobConf conf = createJobConf();
+    conf.setJobName("mr");
+    conf.setJobPriority(JobPriority.HIGH);
+    
+    conf.setInputFormat(TextInputFormat.class);
+
+    conf.setMapOutputKeyClass(LongWritable.class);
+    conf.setMapOutputValueClass(Text.class);
+
+    conf.setOutputFormat(TextOutputFormat.class);
+    conf.setOutputKeyClass(LongWritable.class);
+    conf.setOutputValueClass(Text.class);
+
+    conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);
+    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);
+
+    FileInputFormat.setInputPaths(conf, getInputDir());
+    FileOutputFormat.setOutputPath(conf, getOutputDir());
+
+    return JobClient.runJob(conf).getID().toString();
+  }
+  
+  public static int runTool(Configuration conf, Tool tool, String[] args,
+      OutputStream out) throws Exception {
+    return TestMRJobClient.runTool(conf, tool, args, out);
+  }
+  
+  static void verifyJobPriority(String jobId, String priority,
+      JobConf conf)  throws Exception {
+    TestMRCJCJobClient test = new TestMRCJCJobClient();
+    test.verifyJobPriority(jobId, priority, conf, test.createJobClient());
+  }
+  
+  public void testJobClient() throws Exception {
+    Configuration conf = createJobConf();
+    String jobId = runJob();
+    testGetCounter(jobId, conf);
+    testAllJobList(jobId, conf);
+    testChangingJobPriority(jobId, conf);
+  }
+  
+  protected CLI createJobClient() 
+      throws IOException {
+    return new JobClient();
+  }
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred;
+
+import org.junit.Ignore;
+import org.junit.Test;
+import java.io.File;
+import java.net.URLClassLoader;
+import java.net.URL;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.util.ClassUtil;
+
+
+import static org.junit.Assert.*;
+
+@Ignore
+public class TestMRCJCJobConf {
+  private static final String JAR_RELATIVE_PATH =
+    "build/test/mapred/testjar/testjob.jar";
+  private static final String CLASSNAME = "testjar.ClassWordCount";
+
+  private static String TEST_DIR_WITH_SPECIAL_CHARS =
+    System.getProperty("test.build.data","/tmp") +
+    File.separator + "test jobconf with + and spaces";
+
+  @Test
+  public void testFindContainingJar() throws Exception {
+    testJarAtPath(JAR_RELATIVE_PATH);
+  }
+
+  /**
+   * Test that findContainingJar works correctly even if the
+   * path has a "+" sign or spaces in it
+   */
+  @Test
+  public void testFindContainingJarWithPlus() throws Exception {
+    new File(TEST_DIR_WITH_SPECIAL_CHARS).mkdirs();
+    Configuration conf = new Configuration();
+
+    FileSystem localfs = FileSystem.getLocal(conf);
+
+    FileUtil.copy(localfs, new Path(JAR_RELATIVE_PATH),
+                  localfs, new Path(TEST_DIR_WITH_SPECIAL_CHARS, "test.jar"),
+                  false, true, conf);
+    testJarAtPath(TEST_DIR_WITH_SPECIAL_CHARS + File.separator + "test.jar");
+  }
+
+  /**
+   * Given a path with a jar, make a classloader with that jar on the
+   * classpath, and check that findContainingJar can correctly
+   * identify the path of the jar.
+   */
+  private void testJarAtPath(String path) throws Exception {
+    File jar = new File(path).getAbsoluteFile();
+    assertTrue(jar.exists());
+
+    URL[] urls = new URL[] {
+      jar.toURI().toURL()
+    };
+
+    ClassLoader cl = new URLClassLoader(urls);
+    Class<?> clazz = Class.forName(CLASSNAME, true, cl);
+    assertNotNull(clazz);
+
+    String containingJar = ClassUtil.findContainingJar(clazz);
+    assertEquals(jar.getAbsolutePath(), containingJar);
+  }
+}
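
The tests above exercise ClassUtil.findContainingJar, which resolves the
jar a class was loaded from (returning null when the class came from a
directory classpath). A minimal usage sketch, assuming hadoop-common is on
the classpath:

    import org.apache.hadoop.util.ClassUtil;

    public class FindJarDemo {
      public static void main(String[] args) {
        // ClassUtil itself ships in a jar, so it can locate itself.
        String jar = ClassUtil.findContainingJar(ClassUtil.class);
        System.out.println("loaded from: " + jar);
      }
    }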

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRCJCFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRCJCFileInputFormat.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRCJCFileInputFormat.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRCJCFileInputFormat.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.lib.input;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import static org.mockito.Mockito.*;
+import static org.apache.hadoop.test.MockitoMaker.*;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+public class TestMRCJCFileInputFormat {
+
+  @Test
+  public void testAddInputPath() throws IOException {
+    final Configuration conf = new Configuration();
+    conf.set("fs.defaultFS", "s3://abc:xyz@hostname/");
+    final Job j = Job.getInstance(conf);
+    j.getConfiguration().set("fs.defaultFS", "s3://abc:xyz@hostname/");
+
+    //setup default fs
+    final FileSystem defaultfs = FileSystem.get(conf);
+    System.out.println("defaultfs.getUri() = " + defaultfs.getUri());
+
+    {
+      //test addInputPath
+      final Path original = new Path("file:/foo");
+      System.out.println("original = " + original);
+      FileInputFormat.addInputPath(j, original);
+      final Path[] results = FileInputFormat.getInputPaths(j);
+      System.out.println("results = " + Arrays.asList(results));
+      assertEquals(1, results.length);
+      assertEquals(original, results[0]);
+    }
+
+    {
+      //test setInputPaths
+      final Path original = new Path("file:/bar");
+      System.out.println("original = " + original);
+      FileInputFormat.setInputPaths(j, original);
+      final Path[] results = FileInputFormat.getInputPaths(j);
+      System.out.println("results = " + Arrays.asList(results));
+      assertEquals(1, results.length);
+      assertEquals(original, results[0]);
+    }
+  }
+
+  @Test
+  public void testNumInputFiles() throws Exception {
+    Configuration conf = spy(new Configuration());
+    Job job = make(stub(Job.class).returning(conf).from.getConfiguration());
+    FileStatus stat = make(stub(FileStatus.class).returning(0L).from.getLen());
+    TextInputFormat ispy = spy(new TextInputFormat());
+    doReturn(Arrays.asList(stat)).when(ispy).listStatus(job);
+
+    ispy.getSplits(job);
+    verify(conf).setLong(FileInputFormat.NUM_INPUT_FILES, 1);
+  }
+  
+  @Test
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  public void testLastInputSplitAtSplitBoundary() throws Exception {
+    FileInputFormat fif = new FileInputFormatForTest(1024L * 1024 * 1024,
+        128L * 1024 * 1024);
+    Configuration conf = new Configuration();
+    JobContext jobContext = mock(JobContext.class);
+    when(jobContext.getConfiguration()).thenReturn(conf);
+    List<InputSplit> splits = fif.getSplits(jobContext);
+    assertEquals(8, splits.size());
+    for (int i = 0 ; i < splits.size() ; i++) {
+      InputSplit split = splits.get(i);
+      assertEquals(("host" + i), split.getLocations()[0]);
+    }
+  }
+  
+  @Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  public void testLastInputSplitExceedingSplitBoundary() throws Exception {
+    FileInputFormat fif = new FileInputFormatForTest(1027L * 1024 * 1024,
+        128L * 1024 * 1024);
+    Configuration conf = new Configuration();
+    JobContext jobContext = mock(JobContext.class);
+    when(jobContext.getConfiguration()).thenReturn(conf);
+    List<InputSplit> splits = fif.getSplits(jobContext);
+    assertEquals(8, splits.size());
+    for (int i = 0; i < splits.size(); i++) {
+      InputSplit split = splits.get(i);
+      assertEquals(("host" + i), split.getLocations()[0]);
+    }
+  }
+
+  @Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  public void testLastInputSplitSingleSplit() throws Exception {
+    FileInputFormat fif = new FileInputFormatForTest(100L * 1024 * 1024,
+        128L * 1024 * 1024);
+    Configuration conf = new Configuration();
+    JobContext jobContext = mock(JobContext.class);
+    when(jobContext.getConfiguration()).thenReturn(conf);
+    List<InputSplit> splits = fif.getSplits(jobContext);
+    assertEquals(1, splits.size());
+    for (int i = 0; i < splits.size(); i++) {
+      InputSplit split = splits.get(i);
+      assertEquals(("host" + i), split.getLocations()[0]);
+    }
+  }
+
+  /**
+   * Test when the input file's length is 0.
+   */
+  @Test
+  public void testForEmptyFile() throws Exception {
+      Configuration conf = new Configuration();
+      FileSystem fileSys = FileSystem.get(conf);
+      Path file = new Path("test" + "/file");
+      FSDataOutputStream out = fileSys.create(file, true,
+              conf.getInt("io.file.buffer.size", 4096), (short) 1, (long) 1024);
+      out.write(new byte[0]);
+      out.close();
+
+      // split it using a File input format
+      DummyInputFormat inFormat = new DummyInputFormat();
+      Job job = Job.getInstance(conf);
+      FileInputFormat.setInputPaths(job, "test");
+      List<InputSplit> splits = inFormat.getSplits(job);
+      assertEquals(1, splits.size());
+      FileSplit fileSplit = (FileSplit) splits.get(0);
+      assertEquals(0, fileSplit.getLocations().length);
+      assertEquals(file.getName(), fileSplit.getPath().getName());
+      assertEquals(0, fileSplit.getStart());
+      assertEquals(0, fileSplit.getLength());
+
+      fileSys.delete(file.getParent(), true);
+  }
+
+  /** Dummy class to extend FileInputFormat*/
+  private class DummyInputFormat extends FileInputFormat<Text, Text> {
+    @Override
+    public RecordReader<Text,Text> createRecordReader(InputSplit split,
+        TaskAttemptContext context) throws IOException {
+      return null;
+    }
+  }
+
+  private class FileInputFormatForTest<K, V> extends FileInputFormat<K, V> {
+
+    long splitSize;
+    long length;
+
+    FileInputFormatForTest(long length, long splitSize) {
+      this.length = length;
+      this.splitSize = splitSize;
+    }
+
+    @Override
+    public RecordReader<K, V> createRecordReader(InputSplit split,
+        TaskAttemptContext context) throws IOException, InterruptedException {
+      return null;
+    }
+
+    @Override
+    protected List<FileStatus> listStatus(JobContext job) throws IOException {
+      FileStatus mockFileStatus = mock(FileStatus.class);
+      when(mockFileStatus.getBlockSize()).thenReturn(splitSize);
+      Path mockPath = mock(Path.class);
+      FileSystem mockFs = mock(FileSystem.class);
+
+      BlockLocation[] blockLocations = mockBlockLocations(length, splitSize);
+      when(mockFs.getFileBlockLocations(mockFileStatus, 0, length)).thenReturn(
+          blockLocations);
+      when(mockPath.getFileSystem(any(Configuration.class))).thenReturn(mockFs);
+
+      when(mockFileStatus.getPath()).thenReturn(mockPath);
+      when(mockFileStatus.getLen()).thenReturn(length);
+
+      List<FileStatus> list = new ArrayList<FileStatus>();
+      list.add(mockFileStatus);
+      return list;
+    }
+
+    @Override
+    protected long computeSplitSize(long blockSize, long minSize, long maxSize) {
+      return splitSize;
+    }
+
+    private BlockLocation[] mockBlockLocations(long size, long splitSize) {
+      int numLocations = (int) (size / splitSize);
+      if (size % splitSize != 0)
+        numLocations++;
+      BlockLocation[] blockLocations = new BlockLocation[numLocations];
+      for (int i = 0; i < numLocations; i++) {
+        String[] names = new String[] { "b" + i };
+        String[] hosts = new String[] { "host" + i };
+        blockLocations[i] = new BlockLocation(names, hosts, i * splitSize,
+            Math.min(splitSize, size - (splitSize * i)));
+      }
+      return blockLocations;
+    }
+  }
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,247 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.lib.output;
+
+import java.io.*;
+import java.net.URI;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapred.UtilsForTests;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.task.JobContextImpl;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+
+
+public class TestMRCJCFileOutputCommitter extends TestCase {
+  private static Path outDir = new Path(System.getProperty("test.build.data",
+      "/tmp"), "output");
+
+  // A random task attempt id for testing.
+  private static String attempt = "attempt_200707121733_0001_m_000000_0";
+  private static String partFile = "part-m-00000";
+  private static TaskAttemptID taskID = TaskAttemptID.forName(attempt);
+  private Text key1 = new Text("key1");
+  private Text key2 = new Text("key2");
+  private Text val1 = new Text("val1");
+  private Text val2 = new Text("val2");
+
+  @SuppressWarnings("unchecked")
+  private void writeOutput(RecordWriter theRecordWriter,
+      TaskAttemptContext context) throws IOException, InterruptedException {
+    NullWritable nullWritable = NullWritable.get();
+
+    try {
+      theRecordWriter.write(key1, val1);
+      theRecordWriter.write(null, nullWritable);
+      theRecordWriter.write(null, val1);
+      theRecordWriter.write(nullWritable, val2);
+      theRecordWriter.write(key2, nullWritable);
+      theRecordWriter.write(key1, null);
+      theRecordWriter.write(null, null);
+      theRecordWriter.write(key2, val2);
+    } finally {
+      theRecordWriter.close(context);
+    }
+  }
+
+  private static void cleanup() throws IOException {
+    Configuration conf = new Configuration();
+    FileSystem fs = outDir.getFileSystem(conf);
+    fs.delete(outDir, true);
+  }
+  
+  @Override
+  public void setUp() throws IOException {
+    cleanup();
+  }
+  
+  @Override
+  public void tearDown() throws IOException {
+    cleanup();
+  }
+  
+  @SuppressWarnings("unchecked")
+  public void testCommitter() throws Exception {
+    Job job = Job.getInstance();
+    FileOutputFormat.setOutputPath(job, outDir);
+    Configuration conf = job.getConfiguration();
+    conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
+    JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
+    TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
+    FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
+
+    // setup
+    committer.setupJob(jContext);
+    committer.setupTask(tContext);
+
+    // write output
+    TextOutputFormat theOutputFormat = new TextOutputFormat();
+    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
+    writeOutput(theRecordWriter, tContext);
+
+    // do commit
+    committer.commitTask(tContext);
+    committer.commitJob(jContext);
+
+    // validate output
+    File expectedFile = new File(new Path(outDir, partFile).toString());
+    StringBuffer expectedOutput = new StringBuffer();
+    expectedOutput.append(key1).append('\t').append(val1).append("\n");
+    expectedOutput.append(val1).append("\n");
+    expectedOutput.append(val2).append("\n");
+    expectedOutput.append(key2).append("\n");
+    expectedOutput.append(key1).append("\n");
+    expectedOutput.append(key2).append('\t').append(val2).append("\n");
+    String output = UtilsForTests.slurp(expectedFile);
+    assertEquals(expectedOutput.toString(), output);
+    FileUtil.fullyDelete(new File(outDir.toString()));
+  }
+  
+  public void testEmptyOutput() throws Exception {
+    Job job = Job.getInstance();
+    FileOutputFormat.setOutputPath(job, outDir);
+    Configuration conf = job.getConfiguration();
+    conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
+    JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
+    TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
+    FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
+
+    // setup
+    committer.setupJob(jContext);
+    committer.setupTask(tContext);
+
+    // Do not write any output
+
+    // do commit
+    committer.commitTask(tContext);
+    committer.commitJob(jContext);
+    
+    FileUtil.fullyDelete(new File(outDir.toString()));
+  }
+
+  @SuppressWarnings("unchecked")
+  public void testAbort() throws IOException, InterruptedException {
+    Job job = Job.getInstance();
+    FileOutputFormat.setOutputPath(job, outDir);
+    Configuration conf = job.getConfiguration();
+    conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
+    JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
+    TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
+    FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
+
+    // do setup
+    committer.setupJob(jContext);
+    committer.setupTask(tContext);
+
+    // write output
+    TextOutputFormat theOutputFormat = new TextOutputFormat();
+    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
+    writeOutput(theRecordWriter, tContext);
+
+    // do abort
+    committer.abortTask(tContext);
+    File expectedFile = new File(new Path(committer.getWorkPath(), partFile)
+        .toString());
+    assertFalse("task temp dir still exists", expectedFile.exists());
+
+    committer.abortJob(jContext, JobStatus.State.FAILED);
+    expectedFile = new File(new Path(outDir, FileOutputCommitter.PENDING_DIR_NAME)
+        .toString());
+    assertFalse("job temp dir still exists", expectedFile.exists());
+    assertEquals("Output directory not empty", 0, new File(outDir.toString())
+        .listFiles().length);
+    FileUtil.fullyDelete(new File(outDir.toString()));
+  }
+
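+  // A local file system whose delete() always fails, registered under the
+  // "faildel" scheme so testFailAbort can observe an abort that cannot
+  // clean up its temporary directories.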
+  public static class FakeFileSystem extends RawLocalFileSystem {
+    public FakeFileSystem() {
+      super();
+    }
+
+    @Override
+    public URI getUri() {
+      return URI.create("faildel:///");
+    }
+
+    @Override
+    public boolean delete(Path p, boolean recursive) throws IOException {
+      throw new IOException("fake delete failed");
+    }
+  }
+
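+  // With FakeFileSystem in place, abortTask/abortJob must propagate the
+  // delete failure as an IOException while the temp dirs remain on disk.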
+  @SuppressWarnings("unchecked")
+  public void testFailAbort() throws IOException, InterruptedException {
+    Job job = Job.getInstance();
+    Configuration conf = job.getConfiguration();
+    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");
+    conf.setClass("fs.faildel.impl", FakeFileSystem.class, FileSystem.class);
+    conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
+    FileOutputFormat.setOutputPath(job, outDir);
+    JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
+    TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
+    FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
+
+    // do setup
+    committer.setupJob(jContext);
+    committer.setupTask(tContext);
+
+    // write output
+    TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
+    RecordWriter<?, ?> theRecordWriter = theOutputFormat
+        .getRecordWriter(tContext);
+    writeOutput(theRecordWriter, tContext);
+
+    // do abort
+    Throwable th = null;
+    try {
+      committer.abortTask(tContext);
+    } catch (IOException ie) {
+      th = ie;
+    }
+    assertNotNull(th);
+    assertTrue(th instanceof IOException);
+    assertTrue(th.getMessage().contains("fake delete failed"));
+    File jobTmpDir = new File(committer.getJobAttemptPath(jContext).toUri().getPath());
+    File taskTmpDir = new File(committer.getTaskAttemptPath(tContext).toUri().getPath());
+    File expectedFile = new File(taskTmpDir, partFile);
+    assertTrue(expectedFile + " does not exist", expectedFile.exists());
+
+    th = null;
+    try {
+      committer.abortJob(jContext, JobStatus.State.FAILED);
+    } catch (IOException ie) {
+      th = ie;
+    }
+    assertNotNull(th);
+    assertTrue(th instanceof IOException);
+    assertTrue(th.getMessage().contains("fake delete failed"));
+    assertTrue("job temp dir does not exist", jobTmpDir.exists());
+    FileUtil.fullyDelete(new File(outDir.toString()));
+  }
+}
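
For readers skimming the diff: the sequence these tests drive is the standard
two-phase commit of FileOutputCommitter. A minimal sketch of that lifecycle,
reusing the names from the test above (conf, taskID, outDir), as an
illustration rather than the committer's exact internals:

    // Sketch only: the setup -> write -> commit sequence from testCommitter.
    JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
    FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
    committer.setupJob(jContext);    // creates the pending dir (PENDING_DIR_NAME)
    committer.setupTask(tContext);   // per-attempt setup
    // ... the task writes records under committer.getWorkPath() ...
    committer.commitTask(tContext);  // promotes this attempt's output
    committer.commitJob(jContext);   // publishes to outDir, removes the temp area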

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCReflectionUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCReflectionUtils.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCReflectionUtils.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCReflectionUtils.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobConfigurable;
+
+import static org.junit.Assert.*;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test for the JobConf-related parts of common's ReflectionUtils
+ * class.
+ */
+public class TestMRCJCReflectionUtils {
+  @Before
+  public void setUp() {
+    ReflectionUtils.clearCache();
+  }
+
+  /**
+   * Tests backward compatibility of ReflectionUtils for
+   * JobConfigurable objects.
+   * This should be deprecated along with the mapred package (HADOOP-1230)
+   * and removed when the mapred package is removed.
+   */
+  @Test
+  public void testSetConf() {
+    JobConfigurableOb ob = new JobConfigurableOb();
+    ReflectionUtils.setConf(ob, new Configuration());
+    assertFalse(ob.configured);
+    ReflectionUtils.setConf(ob, new JobConf());
+    assertTrue(ob.configured);
+  }
+  
+  private static class JobConfigurableOb implements JobConfigurable {
+    boolean configured;
+    public void configure(JobConf job) {
+      configured = true;
+    }
+  }
+}
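
The compatibility contract asserted above amounts to a type dispatch inside
ReflectionUtils.setConf. A simplified sketch of the behavior the assertions
imply (not the actual implementation, which avoids a compile-time dependency
on the mapred package):

    // Illustrative only: configure() fires for a JobConf, not a Configuration.
    static void setConfSketch(Object target, Configuration conf) {
      if (conf instanceof JobConf && target instanceof JobConfigurable) {
        ((JobConfigurable) target).configure((JobConf) conf);
      }
    }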

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCRunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCRunJar.java?rev=1530667&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCRunJar.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestMRCJCRunJar.java Wed Oct  9 15:09:37 2013
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
+
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * A test for the RunJar class.
+ */
+public class TestMRCJCRunJar {
+
+  private static String TEST_ROOT_DIR = new Path(System.getProperty(
+      "test.build.data", "/tmp")).toString();
+
+  private static final String TEST_JAR_NAME = "testjar.jar";
+  private static final String CLASS_NAME = "Hello.class";
+
+  @Test
+  public void testRunjar() throws Throwable {
+    File outFile = new File(TEST_ROOT_DIR, "out");
+    // delete if output file already exists.
+    if (outFile.exists()) {
+      outFile.delete();
+    }
+    File makeTestJar = makeTestJar();
+
+    String[] args = new String[3];
+    args[0] = makeTestJar.getAbsolutePath();
+    args[1] = "org.apache.hadoop.util.Hello";
+    args[2] = outFile.toString();
+    RunJar.main(args);
+    Assert.assertTrue("RunJar failed", outFile.exists());
+  }
+
+  private File makeTestJar() throws IOException {
+    File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
+    JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jarFile));
+    BufferedInputStream bufInputStream = new BufferedInputStream(
+        this.getClass().getResourceAsStream(CLASS_NAME), 2048);
+    try {
+      ZipEntry entry = new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME);
+      jstream.putNextEntry(entry);
+      int count;
+      byte[] data = new byte[2048];
+      while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+        jstream.write(data, 0, count);
+      }
+      jstream.closeEntry();
+    } finally {
+      // Close both streams even if copying the class entry fails.
+      bufInputStream.close();
+      jstream.close();
+    }
+    return jarFile;
+  }
+}
\ No newline at end of file
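
Note that testRunjar assumes a pre-compiled Hello.class available as a test
resource; its source is not part of this commit. Since the jar carries no
Main-Class manifest entry, RunJar treats args[1] as the main class and passes
the remaining arguments along, so Hello.main sees the output path as args[0].
A hypothetical class consistent with the test's single assertion:

    // Hypothetical stand-in for the Hello.class resource; the test only
    // checks that the file named by args[0] exists after RunJar returns.
    package org.apache.hadoop.util;

    import java.io.File;
    import java.io.IOException;

    public class Hello {
      public static void main(String[] args) throws IOException {
        new File(args[0]).createNewFile();
      }
    }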


