hadoop-mapreduce-commits mailing list archives

From bo...@apache.org
Subject svn commit: r1462527 [2/2] - in /hadoop/common/branches/branch-0.23/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/ hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/j...
Date Fri, 29 Mar 2013 16:27:55 GMT
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java?rev=1462527&r1=1462526&r2=1462527&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
(original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
Fri Mar 29 16:27:53 2013
@@ -21,41 +21,78 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
 import java.util.Arrays;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.junit.Test;
 
-public class TestMultiFileSplit extends TestCase{
+import static org.junit.Assert.*;
 
-    public void testReadWrite() throws Exception {
-      MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {new Path("/test/path/1"), new Path("/test/path/2")}, new long[] {100,200});
-        
-      ByteArrayOutputStream bos = null;
-      byte[] result = null;
-      try {    
-        bos = new ByteArrayOutputStream();
-        split.write(new DataOutputStream(bos));
-        result = bos.toByteArray();
-      } finally {
-        IOUtils.closeStream(bos);
-      }
-      
-      MultiFileSplit readSplit = new MultiFileSplit();
-      ByteArrayInputStream bis = null;
-      try {
-        bis = new ByteArrayInputStream(result);
-        readSplit.readFields(new DataInputStream(bis));
-      } finally {
-        IOUtils.closeStream(bis);
-      }
-      
-      assertTrue(split.getLength() != 0);
-      assertEquals(split.getLength(), readSplit.getLength());
-      assertTrue(Arrays.equals(split.getPaths(), readSplit.getPaths()));
-      assertTrue(Arrays.equals(split.getLengths(), readSplit.getLengths()));
-      System.out.println(split.toString());
+/**
+ * test MultiFileSplit class
+ */
+public class TestMultiFileSplit {
+  
+  @Test (timeout=10000)
+  public void testReadWrite() throws Exception {
+    MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {
+        new Path("/test/path/1"), new Path("/test/path/2") }, new long[] { 100,
+        200 });
+
+    ByteArrayOutputStream bos = null;
+    byte[] result = null;
+    try {
+      bos = new ByteArrayOutputStream();
+      split.write(new DataOutputStream(bos));
+      result = bos.toByteArray();
+    } finally {
+      IOUtils.closeStream(bos);
+    }
+
+    MultiFileSplit readSplit = new MultiFileSplit();
+    ByteArrayInputStream bis = null;
+    try {
+      bis = new ByteArrayInputStream(result);
+      readSplit.readFields(new DataInputStream(bis));
+    } finally {
+      IOUtils.closeStream(bis);
     }
+
+    assertTrue(split.getLength() != 0);
+    assertEquals(split.getLength(), readSplit.getLength());
+    assertTrue(Arrays.equals(split.getPaths(), readSplit.getPaths()));
+    assertTrue(Arrays.equals(split.getLengths(), readSplit.getLengths()));
+    System.out.println(split.toString());
+  }
+
+  /**
+   * test method getLocations
+   *
+   * @throws IOException
+   */
+  @Test (timeout=10000)
+  public void testgetLocations() throws IOException {
+    JobConf job = new JobConf();
+
+    File tmpFile = File.createTempFile("test", "txt");
+    tmpFile.deleteOnExit();
+    OutputStream out = new FileOutputStream(tmpFile);
+    out.write("tempfile".getBytes());
+    out.flush();
+    out.close();
+    Path[] path = { new Path(tmpFile.getAbsolutePath()) };
+    long[] lengths = { 100 };
+
+    MultiFileSplit split = new MultiFileSplit(job, path, lengths);
+    String[] locations = split.getLocations();
+    assertTrue(locations.length == 1);
+    assertEquals(locations[0], "localhost");
+  }
 }
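
The write()/readFields() round-trip used by testReadWrite above (and again by TestNetworkedJob and TestQueueConfigurationParser below) is the standard way to exercise a Writable's serialization. A minimal helper capturing the idiom might look like the sketch below; WritableRoundTrip and roundTrip are illustrative names, not part of this patch.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.Writable;

    public class WritableRoundTrip {
      /** Serializes src to bytes, then rebuilds dst from those bytes. */
      public static <T extends Writable> T roundTrip(T src, T dst)
          throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        src.write(out);        // Writable serialization
        out.close();
        DataInputStream in = new DataInputStream(
            new ByteArrayInputStream(bos.toByteArray()));
        dst.readFields(in);    // deserialize into the empty instance
        in.close();
        return dst;
      }
    }

With such a helper, testReadWrite reduces to roundTrip(split, new MultiFileSplit()) followed by the assertions on length, paths and lengths.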

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java?rev=1462527&r1=1462526&r2=1462527&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
(original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
Fri Mar 29 16:27:53 2013
@@ -19,18 +19,45 @@
 package org.apache.hadoop.mapred;
 
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
-import java.util.List;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.ClusterStatus.BlackListInfo;
+import org.apache.hadoop.mapred.JobClient.NetworkedJob;
+import org.apache.hadoop.mapred.JobClient.TaskStatusFilter;
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.yarn.YarnException;
 import org.junit.Test;
-import static org.mockito.Mockito.*;
-
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
 
 public class TestNetworkedJob {
-
-  @SuppressWarnings("deprecation")
-  @Test
+  private static String TEST_ROOT_DIR = new File(System.getProperty(
+      "test.build.data", "/tmp")).toURI().toString().replace(' ', '+');
+  private static Path testDir = new Path(TEST_ROOT_DIR + "/test_mini_mr_local");
+  private static Path inFile = new Path(testDir, "in");
+  private static Path outDir = new Path(testDir, "out");
+  
+  @Test (timeout=10000)
   public void testGetNullCounters() throws Exception {
     //mock creation
     Job mockJob = mock(Job.class);
@@ -41,4 +68,321 @@ public class TestNetworkedJob {
     //verification
     verify(mockJob).getCounters();
   }
+
+  @Test (timeout=100000)
+  public void testGetJobStatus() throws IOException, InterruptedException,
+      ClassNotFoundException {
+    MiniMRClientCluster mr = null;
+    FileSystem fileSys = null;
+
+    try {
+      mr = MiniMRClientClusterFactory.create(this.getClass(), 2,
+          new Configuration());
+
+      JobConf job = new JobConf(mr.getConfig());
+
+      fileSys = FileSystem.get(job);
+      fileSys.delete(testDir, true);
+      FSDataOutputStream out = fileSys.create(inFile, true);
+      out.writeBytes("This is a test file");
+      out.close();
+
+      FileInputFormat.setInputPaths(job, inFile);
+      FileOutputFormat.setOutputPath(job, outDir);
+
+      job.setInputFormat(TextInputFormat.class);
+      job.setOutputFormat(TextOutputFormat.class);
+
+      job.setMapperClass(IdentityMapper.class);
+      job.setReducerClass(IdentityReducer.class);
+      job.setNumReduceTasks(0);
+
+      JobClient client = new JobClient(mr.getConfig());
+      RunningJob rj = client.submitJob(job);
+      // the submitted job should be visible through the client
+      assertNotNull(client.getJob(rj.getID()));
+
+    } finally {
+      if (fileSys != null) {
+        fileSys.delete(testDir, true);
+      }
+      if (mr != null) {
+        mr.stop();
+      }
+    }
+  }
+  /**
+   * test the NetworkedJob and JobClient getters against a mini cluster
+   *
+   * @throws Exception
+   */
+  @SuppressWarnings("deprecation")
+  @Test (timeout=100000)
+  public void testNetworkedJob() throws Exception {
+    // mock creation
+    MiniMRClientCluster mr = null;
+    FileSystem fileSys = null;
+
+    try {
+      Configuration conf = new Configuration();
+      mr = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
+
+      JobConf job = new JobConf(mr.getConfig());
+
+      fileSys = FileSystem.get(job);
+      fileSys.delete(testDir, true);
+      FSDataOutputStream out = fileSys.create(inFile, true);
+      out.writeBytes("This is a test file");
+      out.close();
+
+      FileInputFormat.setInputPaths(job, inFile);
+      FileOutputFormat.setOutputPath(job, outDir);
+
+      job.setInputFormat(TextInputFormat.class);
+      job.setOutputFormat(TextOutputFormat.class);
+
+      job.setMapperClass(IdentityMapper.class);
+      job.setReducerClass(IdentityReducer.class);
+      job.setNumReduceTasks(0);
+
+      JobClient client = new JobClient(mr.getConfig());
+
+      RunningJob rj = client.submitJob(job);
+      JobID jobId = rj.getID();
+      NetworkedJob runningJob = (NetworkedJob) client.getJob(jobId);
+      runningJob.setJobPriority(JobPriority.HIGH.name());
+      // test getters
+      assertTrue(runningJob.getConfiguration().toString()
+          .endsWith("0001/job.xml"));
+      assertEquals(runningJob.getID(), jobId);
+      assertEquals(runningJob.getJobID(), jobId.toString());
+      assertTrue(runningJob.getJobName().contains("hadoop-"));
+      assertTrue(runningJob.getJobFile().endsWith(
+          ".staging/" + runningJob.getJobID() + "/job.xml"));
+      assertTrue(runningJob.getTrackingURL().length() > 0);
+      assertTrue(runningJob.mapProgress() == 0.0f);
+      assertTrue(runningJob.reduceProgress() == 0.0f);
+      assertTrue(runningJob.cleanupProgress() == 0.0f);
+      assertTrue(runningJob.setupProgress() == 0.0f);
+
+      TaskCompletionEvent[] tce = runningJob.getTaskCompletionEvents(0);
+      assertEquals(tce.length, 0);
+
+      assertEquals(runningJob.getHistoryUrl(), "");
+      assertFalse(runningJob.isRetired());
+      assertEquals(runningJob.getFailureInfo(), "");
+      assertEquals(client.getMapTaskReports(jobId).length, 0);
+      
+      try {
+        client.getSetupTaskReports(jobId);
+        fail("getSetupTaskReports should fail for an unrecognized task type");
+      } catch (YarnException e) {
+        assertEquals(e.getMessage(), "Unrecognized task type: JOB_SETUP");
+      }
+      try {
+        client.getCleanupTaskReports(jobId);
+        fail("getCleanupTaskReports should fail for an unrecognized task type");
+      } catch (YarnException e) {
+        assertEquals(e.getMessage(), "Unrecognized task type: JOB_CLEANUP");
+      }
+      assertEquals(client.getReduceTaskReports(jobId).length, 0);
+      // test ClusterStatus
+      ClusterStatus status = client.getClusterStatus(true);
+      assertEquals(status.getActiveTrackerNames().size(), 2);
+      // the blacklist-related methods are not implemented in MR2 and
+      // always return zero, an empty collection, or null
+      assertEquals(status.getBlacklistedTrackers(), 0);
+      assertEquals(status.getBlacklistedTrackerNames().size(), 0);
+      assertEquals(status.getBlackListedTrackersInfo().size(), 0);
+      assertEquals(status.getJobTrackerStatus(), JobTrackerStatus.RUNNING);
+      assertEquals(status.getMapTasks(), 1);
+      assertEquals(status.getMaxMapTasks(), 20);
+      assertEquals(status.getMaxReduceTasks(), 4);
+      assertEquals(status.getNumExcludedNodes(), 0);
+      assertEquals(status.getReduceTasks(), 1);
+      assertEquals(status.getTaskTrackers(), 2);
+      assertEquals(status.getTTExpiryInterval(), 0);
+
+      // test read and write
+      ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
+      status.write(new DataOutputStream(dataOut));
+      ClusterStatus status2 = new ClusterStatus();
+
+      status2.readFields(new DataInputStream(new ByteArrayInputStream(dataOut
+          .toByteArray())));
+      assertEquals(status.getActiveTrackerNames(),
+          status2.getActiveTrackerNames());
+      assertEquals(status.getBlackListedTrackersInfo(),
+          status2.getBlackListedTrackersInfo());
+      assertEquals(status.getMapTasks(), status2.getMapTasks());
+
+      // test TaskStatusFilter
+      JobClient.setTaskOutputFilter(job, TaskStatusFilter.ALL);
+      assertEquals(JobClient.getTaskOutputFilter(job), TaskStatusFilter.ALL);
+
+      // test default map
+      assertEquals(client.getDefaultMaps(), 20);
+      assertEquals(client.getDefaultReduces(), 4);
+      assertEquals(client.getSystemDir().getName(), "jobSubmitDir");
+      // test queue information
+      JobQueueInfo[] rootQueueInfo = client.getRootQueues();
+      assertEquals(rootQueueInfo.length, 1);
+      assertEquals(rootQueueInfo[0].getQueueName(), "default");
+      JobQueueInfo[] qinfo = client.getQueues();
+      assertEquals(qinfo.length, 1);
+      assertEquals(qinfo[0].getQueueName(), "default");
+      assertEquals(client.getChildQueues("default").length, 0);
+      assertEquals(client.getJobsFromQueue("default").length, 1);
+      assertTrue(client.getJobsFromQueue("default")[0].getJobFile().endsWith(
+          "/job.xml"));
+
+      JobQueueInfo qi = client.getQueueInfo("default");
+      assertEquals(qi.getQueueName(), "default");
+      assertEquals(qi.getQueueState(), "running");
+
+      QueueAclsInfo[] aai = client.getQueueAclsForCurrentUser();
+      assertEquals(aai.length, 2);
+      assertEquals(aai[0].getQueueName(), "root");
+      assertEquals(aai[1].getQueueName(), "default");
+      // test token
+      Token<DelegationTokenIdentifier> token = client
+          .getDelegationToken(new Text(UserGroupInformation.getCurrentUser()
+              .getShortUserName()));
+      assertEquals(token.getKind().toString(), "RM_DELEGATION_TOKEN");
+
+      // renewing or cancelling an RM delegation token through JobClient
+      // is expected to be unsupported
+      try {
+        client.renewDelegationToken(token);
+        fail("renewDelegationToken should be unsupported");
+      } catch (UnsupportedOperationException e) {
+        assertTrue(e.getMessage().endsWith(
+            "is not supported  for RM_DELEGATION_TOKEN tokens"));
+      }
+      try {
+        client.cancelDelegationToken(token);
+        fail("cancelDelegationToken should be unsupported");
+      } catch (UnsupportedOperationException e) {
+        assertTrue(e.getMessage().endsWith(
+            "is not supported  for RM_DELEGATION_TOKEN tokens"));
+      }
+    } finally {
+      if (fileSys != null) {
+        fileSys.delete(testDir, true);
+      }
+      if (mr != null) {
+        mr.stop();
+      }
+    }
+  }
+
+  /**
+   * test BlackListInfo class
+   * 
+   * @throws IOException
+   */
+  @Test (timeout=10000)
+  public void testBlackListInfo() throws IOException {
+    BlackListInfo info = new BlackListInfo();
+    info.setBlackListReport("blackListInfo");
+    info.setReasonForBlackListing("reasonForBlackListing");
+    info.setTrackerName("trackerName");
+    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
+    DataOutput out = new DataOutputStream(byteOut);
+    info.write(out);
+    BlackListInfo info2 = new BlackListInfo();
+    info2.readFields(new DataInputStream(new ByteArrayInputStream(byteOut
+        .toByteArray())));
+    assertEquals(info, info2);
+    assertEquals(info.toString(), info2.toString());
+    assertEquals(info.getTrackerName(), "trackerName");
+    assertEquals(info.getReasonForBlackListing(), "reasonForBlackListing");
+    assertEquals(info.getBlackListReport(), "blackListInfo");
+  }
+  /**
+   * test running JobQueueClient from the command line
+   *
+   * @throws Exception
+   */
+  @Test (timeout=10000)
+  public void testJobQueueClient() throws Exception {
+    MiniMRClientCluster mr = null;
+    FileSystem fileSys = null;
+    PrintStream oldOut = System.out;
+    try {
+      Configuration conf = new Configuration();
+      mr = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
+
+      JobConf job = new JobConf(mr.getConfig());
+
+      fileSys = FileSystem.get(job);
+      fileSys.delete(testDir, true);
+      FSDataOutputStream out = fileSys.create(inFile, true);
+      out.writeBytes("This is a test file");
+      out.close();
+
+      FileInputFormat.setInputPaths(job, inFile);
+      FileOutputFormat.setOutputPath(job, outDir);
+
+      job.setInputFormat(TextInputFormat.class);
+      job.setOutputFormat(TextOutputFormat.class);
+
+      job.setMapperClass(IdentityMapper.class);
+      job.setReducerClass(IdentityReducer.class);
+      job.setNumReduceTasks(0);
+
+      JobClient client = new JobClient(mr.getConfig());
+
+      client.submitJob(job);
+
+      JobQueueClient jobClient = new JobQueueClient(job);
+
+      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+      System.setOut(new PrintStream(bytes));
+      String[] arg = { "-list" };
+      jobClient.run(arg);
+      assertTrue(bytes.toString().contains("Queue Name : default"));
+      assertTrue(bytes.toString().contains("Queue State : running"));
+      bytes = new ByteArrayOutputStream();
+      System.setOut(new PrintStream(bytes));
+      String[] arg1 = { "-showacls" };
+      jobClient.run(arg1);
+      assertTrue(bytes.toString().contains("Queue acls for user :"));
+      assertTrue(bytes.toString().contains(
+          "root  ADMINISTER_QUEUE,SUBMIT_APPLICATIONS"));
+      assertTrue(bytes.toString().contains(
+          "default  ADMINISTER_QUEUE,SUBMIT_APPLICATIONS"));
+
+      // test for info and default queue
+
+      bytes = new ByteArrayOutputStream();
+      System.setOut(new PrintStream(bytes));
+      String[] arg2 = { "-info", "default" };
+      jobClient.run(arg2);
+      assertTrue(bytes.toString().contains("Queue Name : default"));
+      assertTrue(bytes.toString().contains("Queue State : running"));
+      assertTrue(bytes.toString().contains("Scheduling Info"));
+
+      // test for info , default queue and jobs
+      bytes = new ByteArrayOutputStream();
+      System.setOut(new PrintStream(bytes));
+      String[] arg3 = { "-info", "default", "-showJobs" };
+      jobClient.run(arg3);
+      assertTrue(bytes.toString().contains("Queue Name : default"));
+      assertTrue(bytes.toString().contains("Queue State : running"));
+      assertTrue(bytes.toString().contains("Scheduling Info"));
+      assertTrue(bytes.toString().contains("job_1"));
+
+      // with no arguments JobQueueClient should just print usage
+      String[] arg4 = {};
+      jobClient.run(arg4);
+
+    } finally {
+      System.setOut(oldOut);
+      if (fileSys != null) {
+        fileSys.delete(testDir, true);
+      }
+      if (mr != null) {
+        mr.stop();
+      }
+    }
+  }
 }
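
testJobQueueClient above repeatedly swaps System.out for a ByteArrayOutputStream and restores it in the finally block. That capture-and-restore idiom can be factored into a small helper; StdoutCapture below is an illustrative sketch, not part of the patch (it avoids try-with-resources since branch-0.23 targets Java 6).

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    public class StdoutCapture {
      private final PrintStream saved = System.out;
      private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();

      public StdoutCapture() {
        System.setOut(new PrintStream(buffer));  // redirect stdout to the buffer
      }

      public String contents() {
        return buffer.toString();                // everything printed so far
      }

      public void restore() {
        System.setOut(saved);                    // put the real stdout back
      }
    }

A test would create the capture before jobClient.run(...), assert on contents(), and call restore() in a finally block, exactly as the inline version does.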

Added: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java?rev=1462527&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java
(added)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java
Fri Mar 29 16:27:53 2013
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import java.io.StringWriter;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+public class TestQueueConfigurationParser {
+  /**
+   * test XML generation for a queue hierarchy
+   *
+   * @throws Exception
+   */
+  @Test (timeout=10000)
+  public void testQueueConfigurationParser() throws Exception {
+    JobQueueInfo info = new JobQueueInfo("root", "rootInfo");
+    JobQueueInfo infoChild1 = new JobQueueInfo("child1", "child1Info");
+    JobQueueInfo infoChild2 = new JobQueueInfo("child2", "child2Info");
+
+    info.addChild(infoChild1);
+    info.addChild(infoChild2);
+    DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory
+        .newInstance();
+    DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
+    Document document = builder.newDocument();
+
+    // test QueueConfigurationParser.getQueueElement.
+    Element e = QueueConfigurationParser.getQueueElement(document, info);
+    // transform result to string for check
+    DOMSource domSource = new DOMSource(e);
+    StringWriter writer = new StringWriter();
+    StreamResult result = new StreamResult(writer);
+    TransformerFactory tf = TransformerFactory.newInstance();
+    Transformer transformer = tf.newTransformer();
+    transformer.transform(domSource, result);
+    String str = writer.toString();
+
+    assertTrue(str
+        .endsWith("<queue><name>root</name><properties/><state>running</state><queue><name>child1</name><properties/><state>running</state></queue><queue><name>child2</name><properties/><state>running</state></queue></queue>"));
+  }
+}
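
The Transformer boilerplate in this test is the stock JAXP way to serialize a DOM node to text, since Element has no useful toString() of its own. Factored out it is only a few lines; Xml.toString below is an illustrative helper, not Hadoop API.

    import java.io.StringWriter;

    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerException;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.dom.DOMSource;
    import javax.xml.transform.stream.StreamResult;

    import org.w3c.dom.Element;

    public final class Xml {
      /** Serializes a DOM element to its XML text form. */
      public static String toString(Element e) throws TransformerException {
        StringWriter writer = new StringWriter();
        Transformer t = TransformerFactory.newInstance().newTransformer();
        t.transform(new DOMSource(e), new StreamResult(writer));
        return writer.toString();
      }
    }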

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java?rev=1462527&r1=1462526&r2=1462527&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
(original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
Fri Mar 29 16:27:53 2013
@@ -17,13 +17,18 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
+import java.util.Map;
 
 import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
 import org.apache.hadoop.mapred.StatisticsCollector.Stat;
+import org.junit.Test;
 
-public class TestStatisticsCollector extends TestCase{
+import static org.junit.Assert.*;
+
+public class TestStatisticsCollector {
 
+  @SuppressWarnings("rawtypes")
+  @Test (timeout=20000)
   public void testMovingWindow() throws Exception {
     StatisticsCollector collector = new StatisticsCollector(1);
     TimeWindow window = new TimeWindow("test", 6, 2);
@@ -78,6 +83,28 @@ public class TestStatisticsCollector ext
     collector.update();
     assertEquals((10+10+10+12+13+14), stat.getValues().get(window).getValue());
     assertEquals(95, stat.getValues().get(sincStart).getValue());
+
+    // test the Stat class
+    Map updaters = collector.getUpdaters();
+    assertEquals(updaters.size(), 2);
+    Map<String, Stat> statistics = collector.getStatistics();
+    assertNotNull(statistics.get("m1"));
+
+    Stat newStat = collector.createStat("m2");
+    assertEquals(newStat.name, "m2");
+    Stat st = collector.removeStat("m1");
+    assertEquals(st.name, "m1");
+    assertEquals((10+10+10+12+13+14), stat.getValues().get(window).getValue());
+    assertEquals(95, stat.getValues().get(sincStart).getValue());
+    // removing the same stat a second time should yield null
+    st = collector.removeStat("m1");
+    assertNull(st);
+    collector.start();
+    // wait 2.5 sec so the collector thread performs its scheduled updates
+    Thread.sleep(2500);
+    assertEquals(69, stat.getValues().get(window).getValue());
+    assertEquals(95, stat.getValues().get(sincStart).getValue());
   }
 
 }
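
The window assertions above ((10+10+10+12+13+14) before the collector thread runs, 69 after) rely on the TimeWindow keeping only a bounded set of the most recent samples while the since-start statistic keeps accumulating. Conceptually the windowed part is a bounded moving-window sum; MovingWindowSum below is an illustrative sketch of that idea, not the Hadoop implementation.

    import java.util.ArrayDeque;
    import java.util.Deque;

    public class MovingWindowSum {
      private final int capacity;   // number of samples the window retains
      private final Deque<Integer> window = new ArrayDeque<Integer>();
      private int sum;

      public MovingWindowSum(int capacity) {
        this.capacity = capacity;
      }

      public void add(int value) {
        window.addLast(value);
        sum += value;
        if (window.size() > capacity) {
          sum -= window.removeFirst();  // evict the oldest sample
        }
      }

      public int getSum() {
        return sum;
      }
    }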

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java?rev=1462527&r1=1462526&r2=1462527&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
(original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
Fri Mar 29 16:27:53 2013
@@ -61,11 +61,12 @@ public class TestTextInputFormat {
       throw new RuntimeException("init failure", e);
     }
   }
+  @SuppressWarnings("deprecation")
   private static Path workDir =
     new Path(new Path(System.getProperty("test.build.data", "/tmp")),
              "TestTextInputFormat").makeQualified(localFs);
 
-  @Test
+  @Test (timeout=100000)
   public void testFormat() throws Exception {
     JobConf job = new JobConf(defaultConf);
     Path file = new Path(workDir, "test.txt");
@@ -145,7 +146,7 @@ public class TestTextInputFormat {
     }
   }
 
-  @Test
+  @Test (timeout=500000)
   public void testSplitableCodecs() throws IOException {
     JobConf conf = new JobConf(defaultConf);
     int seed = new Random().nextInt();
@@ -250,7 +251,7 @@ public class TestTextInputFormat {
                                            bufsz);
   }
 
-  @Test
+  @Test (timeout=10000)
   public void testUTF8() throws Exception {
     LineReader in = makeStream("abcd\u20acbdcd\u20ac");
     Text line = new Text();
@@ -269,7 +270,7 @@ public class TestTextInputFormat {
    *
    * @throws Exception
    */
-  @Test
+  @Test (timeout=10000)
   public void testNewLines() throws Exception {
     final String STR = "a\nbb\n\nccc\rdddd\r\r\r\n\r\neeeee";
     final int STRLENBYTES = STR.getBytes().length;
@@ -309,7 +310,7 @@ public class TestTextInputFormat {
    *
    * @throws Exception
    */
-  @Test
+  @Test (timeout=10000)
   public void testMaxLineLength() throws Exception {
     final String STR = "a\nbb\n\nccc\rdddd\r\neeeee";
     final int STRLENBYTES = STR.getBytes().length;
@@ -334,7 +335,7 @@ public class TestTextInputFormat {
     }
   }
 
-  @Test
+  @Test (timeout=10000)
   public void testMRMaxLine() throws Exception {
     final int MAXPOS = 1024 * 1024;
     final int MAXLINE = 10 * 1024;
@@ -354,6 +355,9 @@ public class TestTextInputFormat {
         position += b.length;
         return b.length;
       }
+      public void reset() {
+        // rewind the synthetic stream so another reader can consume it
+        position = 0;
+      }
     };
     final LongWritable key = new LongWritable();
     final Text val = new Text();
@@ -362,8 +366,14 @@ public class TestTextInputFormat {
     conf.setInt(org.apache.hadoop.mapreduce.lib.input.
                 LineRecordReader.MAX_LINE_LENGTH, MAXLINE);
     conf.setInt("io.file.buffer.size", BUF); // used by LRR
-    final LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf);
+    LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf);
+    assertFalse("Read a line from null", lrr.next(key, val));
+    // exercise the (InputStream, long, int, int) constructor as well;
+    // rewind the synthetic stream first since the previous reader consumed it
+    infNull.reset();
+    lrr = new LineRecordReader(infNull, 0L, MAXLINE, MAXPOS);
     assertFalse("Read a line from null", lrr.next(key, val));
   }
 
   private static void writeFile(FileSystem fs, Path name, 
@@ -400,7 +410,7 @@ public class TestTextInputFormat {
   /**
    * Test using the gzip codec for reading
    */
-  @Test
+  @Test (timeout=10000)
   public void testGzip() throws IOException {
     JobConf job = new JobConf(defaultConf);
     CompressionCodec gzip = new GzipCodec();
@@ -434,7 +444,7 @@ public class TestTextInputFormat {
   /**
    * Test using the gzip codec and an empty input file
    */
-  @Test
+  @Test (timeout=10000)
   public void testGzipEmpty() throws IOException {
     JobConf job = new JobConf(defaultConf);
     CompressionCodec gzip = new GzipCodec();

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java?rev=1462527&r1=1462526&r2=1462527&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java
(original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java
Fri Mar 29 16:27:53 2013
@@ -19,12 +19,14 @@
 package org.apache.hadoop.mapred;
 
 import java.io.*;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
+import org.junit.Test;
 
-public class TestTextOutputFormat extends TestCase {
+import static org.junit.Assert.*;
+
+public class TestTextOutputFormat {
   private static JobConf defaultConf = new JobConf();
 
   private static FileSystem localFs = null;
@@ -43,8 +45,7 @@ public class TestTextOutputFormat extend
                       new Path(System.getProperty("test.build.data", "."), 
                                "data"), 
                       FileOutputCommitter.TEMP_DIR_NAME), "_" + attempt);
-
-  @SuppressWarnings("unchecked")
+
+  @Test (timeout=10000)
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     job.set(JobContext.TASK_ATTEMPT_ID, attempt);
@@ -59,8 +60,8 @@ public class TestTextOutputFormat extend
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
 
-    TextOutputFormat theOutputFormat = new TextOutputFormat();
-    RecordWriter theRecordWriter =
+    TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+    RecordWriter<Object,Object> theRecordWriter =
       theOutputFormat.getRecordWriter(localFs, job, file, reporter);
 
     Text key1 = new Text("key1");
@@ -95,7 +96,7 @@ public class TestTextOutputFormat extend
 
   }
 
-  @SuppressWarnings("unchecked")
+  @Test (timeout=10000)
   public void testFormatWithCustomSeparator() throws Exception {
     JobConf job = new JobConf();
     String separator = "\u0001";
@@ -112,8 +113,8 @@ public class TestTextOutputFormat extend
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
 
-    TextOutputFormat theOutputFormat = new TextOutputFormat();
-    RecordWriter theRecordWriter =
+    TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+    RecordWriter<Object,Object> theRecordWriter =
       theOutputFormat.getRecordWriter(localFs, job, file, reporter);
 
     Text key1 = new Text("key1");
@@ -147,7 +148,63 @@ public class TestTextOutputFormat extend
     assertEquals(output, expectedOutput.toString());
 
   }
-
+  /**
+   * test writing compressed output
+   *
+   * @throws IOException
+   */
+  @Test (timeout=10000)
+  public void testCompress() throws IOException {
+    JobConf job = new JobConf();
+    String separator = "\u0001";
+    job.set("mapreduce.output.textoutputformat.separator", separator);
+    job.set(JobContext.TASK_ATTEMPT_ID, attempt);
+    job.set(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS, "true");
+
+    FileOutputFormat.setOutputPath(job, workDir.getParent().getParent());
+    FileOutputFormat.setWorkOutputPath(job, workDir);
+    FileSystem fs = workDir.getFileSystem(job);
+    if (!fs.mkdirs(workDir)) {
+      fail("Failed to create output directory");
+    }
+    String file = "test.txt";
+
+    // A reporter that does nothing
+    Reporter reporter = Reporter.NULL;
+
+    TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+    RecordWriter<Object,Object> theRecordWriter =
+      theOutputFormat.getRecordWriter(localFs, job, file, reporter);
+    Text key1 = new Text("key1");
+    Text key2 = new Text("key2");
+    Text val1 = new Text("val1");
+    Text val2 = new Text("val2");
+    NullWritable nullWritable = NullWritable.get();
+
+    try {
+      theRecordWriter.write(key1, val1);
+      theRecordWriter.write(null, nullWritable);
+      theRecordWriter.write(null, val1);
+      theRecordWriter.write(nullWritable, val2);
+      theRecordWriter.write(key2, nullWritable);
+      theRecordWriter.write(key1, null);
+      theRecordWriter.write(null, null);
+      theRecordWriter.write(key2, val2);
+    } finally {
+      theRecordWriter.close(reporter);
+    }
+    File expectedFile = new File(new Path(workDir, file).toString());
+    StringBuffer expectedOutput = new StringBuffer();
+    expectedOutput.append(key1).append(separator).append(val1).append("\n");
+    expectedOutput.append(val1).append("\n");
+    expectedOutput.append(val2).append("\n");
+    expectedOutput.append(key2).append("\n");
+    expectedOutput.append(key1).append("\n");
+    expectedOutput.append(key2).append(separator).append(val2).append("\n");
+    String output = UtilsForTests.slurp(expectedFile);
+    assertEquals(output, expectedOutput.toString());
+  }
+
   public static void main(String[] args) throws Exception {
     new TestTextOutputFormat().testFormat();
   }
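
The expectedOutput strings in these tests encode TextOutputFormat's null-handling rules: a record whose key and value are both null or NullWritable writes nothing; otherwise the key is written if present, the separator only when both sides are present, then the value if present, then a newline. Restated as a helper for building expected output (formatLine is an illustrative name, assuming that reading of the writer's behavior):

    import org.apache.hadoop.io.NullWritable;

    public class ExpectedLines {
      /** Builds one expected line the way TextOutputFormat lays records out. */
      public static String formatLine(Object key, Object value, String separator) {
        boolean noKey = key == null || key instanceof NullWritable;
        boolean noValue = value == null || value instanceof NullWritable;
        if (noKey && noValue) {
          return "";                     // fully-null records produce no output
        }
        StringBuilder sb = new StringBuilder();
        if (!noKey) {
          sb.append(key);
        }
        if (!noKey && !noValue) {
          sb.append(separator);
        }
        if (!noValue) {
          sb.append(value);
        }
        return sb.append('\n').toString();
      }
    }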


