hadoop-common-commits mailing list archives

From: cdoug...@apache.org
Subject: svn commit: r726896 - in /hadoop/core/trunk: ./ src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/
Date: Tue, 16 Dec 2008 00:58:25 GMT
Author: cdouglas
Date: Mon Dec 15 16:58:24 2008
New Revision: 726896

URL: http://svn.apache.org/viewvc?rev=726896&view=rev
Log:
HADOOP-4860. Split TestFileTailingAdaptors into three separate tests to avoid contention.
Contributed by Eric Yang.
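
For context on the contention being avoided: each of these test methods starts its own ChukwaAgent on the shared control port (chukwaAgent.control.port, default 9093), and adaptors left running by an earlier test can keep emitting chunks that a later test then has to filter out (hence the removeAll() calls and 5-second sleeps in the test bodies below). Moving each method into its own TestCase class gives every test a fresh agent and chunk queue. A minimal sketch of the resulting pattern; the class TestOneAdaptor is illustrative only, not part of this commit:

    package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;

    import junit.framework.TestCase;
    import org.apache.hadoop.chukwa.Chunk;
    import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
    import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;

    public class TestOneAdaptor extends TestCase {  // hypothetical name
      ChunkCatcherConnector chunks;

      public TestOneAdaptor() {
        chunks = new ChunkCatcherConnector();       // fresh chunk queue for this class
        chunks.start();
      }

      public void testOneAdaptor() throws Exception {
        ChukwaAgent agent = new ChukwaAgent();      // binds the control port
        try {
          // Add exactly one adaptor via agent.processCommand(...) and assert on
          // the chunks it produces; no sibling test method shares this agent,
          // so chunks from other adaptors cannot interleave.
          Chunk c = chunks.waitForAChunk();
          assertNotNull(c);
        } finally {
          agent.shutdown();                         // frees the port for the next test class
        }
      }
    }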

Added:
    hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java
    hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=726896&r1=726895&r2=726896&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Mon Dec 15 16:58:24 2008
@@ -403,6 +403,9 @@
     HADOOP-4698. Lower io.sort.mb to 10 in the tests and raise the junit memory
     limit to 512m from 256m. (Nigel Daley via cdouglas)
 
+    HADOOP-4860. Split TestFileTailingAdaptors into three separate tests to
+    avoid contention. (Eric Yang via cdouglas)
+
 Release 0.19.1 - Unreleased
 
   IMPROVEMENTS

Modified: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java?rev=726896&r1=726895&r2=726896&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java (original)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java Mon Dec 15 16:58:24 2008
@@ -36,32 +36,6 @@
     chunks = new ChunkCatcherConnector();
     chunks.start();
   }
-  
-  public void testRawAdaptor() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
-
-    ChukwaAgent  agent = new ChukwaAgent();
-    // Remove any adaptor left over from previous run
-    ChukwaConfiguration cc = new ChukwaConfiguration();
-    int portno = cc.getInt("chukwaAgent.control.port", 9093);
-    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
-    cli.removeAll();
-    // sleep for some time to make sure we don't get chunk from existing streams
-    Thread.sleep(5000);
-    File testFile = makeTestFile("/tmp/chukwaRawTest",80);
-    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor" +
-        " raw " + testFile + " 0");
-    assertTrue(adaptorId != -1);
-    Chunk c = chunks.waitForAChunk();
-    while(!c.getDataType().equals("raw")) {
-        c = chunks.waitForAChunk();
-    }
-    assertTrue(c.getDataType().equals("raw"));
-    assertTrue(c.getRecordOffsets().length == 1);
-    assertTrue(c.getSeqID() == testFile.length());
-    agent.stopAdaptor(adaptorId, false);
-    agent.shutdown();
-  }
-
 
   public void testCrSepAdaptor() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
     ChukwaAgent  agent = new ChukwaAgent();
@@ -83,7 +57,6 @@
         c = chunks.waitForAChunk();
     }
     assertTrue(c.getSeqID() == testFile.length());    
-System.out.println(c.getRecordOffsets().length);
     assertTrue(c.getRecordOffsets().length == 80);
     int recStart = 0;
     for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
@@ -96,56 +69,7 @@
     agent.stopAdaptor(adaptorId, false);
     agent.shutdown();
   }
-  
-  public void testLogRotate() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
-    ChukwaAgent  agent = new ChukwaAgent();
-    // Remove any adaptor left over from previous run
-    ChukwaConfiguration cc = new ChukwaConfiguration();
-    int portno = cc.getInt("chukwaAgent.control.port", 9093);
-    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
-    cli.removeAll();
-    // sleep for some time to make sure we don't get chunk from existing streams
-    Thread.sleep(5000);
-    File testFile = makeTestFile("/tmp/chukwaLogRotateTest",80);
-    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8" +
-        " lines " + testFile + " 0");
-    assertTrue(adaptorId != -1);
-    System.out.println("getting a chunk...");
-    Chunk c = chunks.waitForAChunk(); 
-    System.out.println("got chunk");
-    while(!c.getDataType().equals("lines")) {
-        c = chunks.waitForAChunk();
-    }
-    assertTrue(c.getSeqID() == testFile.length());	  
-    assertTrue(c.getRecordOffsets().length == 80);
-    int recStart = 0;
-    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
-      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
-      System.out.println("record "+ rec+ " was: " + record);
-      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
-      recStart = c.getRecordOffsets()[rec] +1;
-    }
-    assertTrue(c.getDataType().equals("lines"));
-    testFile = makeTestFile("/tmp/chukwaLogRotateTest",40);
-    c = chunks.waitForAChunk(); 
-    System.out.println("got chunk");
-    while(!c.getDataType().equals("lines")) {
-        c = chunks.waitForAChunk();
-    }
-    //assertTrue(c.getSeqID() == testFile.length());	  
-    assertTrue(c.getRecordOffsets().length == 40);
-    recStart = 0;
-    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
-      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
-      System.out.println("record "+ rec+ " was: " + record);
-      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
-      recStart = c.getRecordOffsets()[rec] +1;
-    }
-    assertTrue(c.getDataType().equals("lines"));
-    agent.stopAdaptor(adaptorId, false);
-    agent.shutdown();
-  }
-  
+
   private File makeTestFile(String name, int size) throws IOException {
     File tmpOutput = new File(name);
     FileOutputStream fos = new FileOutputStream(tmpOutput);

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java?rev=726896&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java Mon Dec 15 16:58:24 2008
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
+
+import java.io.*;
+
+import junit.framework.TestCase;
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+
+import java.util.Map;
+import java.util.Iterator;
+import org.apache.hadoop.chukwa.Chunk;
+import org.apache.hadoop.chukwa.datacollection.adaptor.*;
+import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
+import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
+import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
+
+public class TestLogRotate extends TestCase {
+  ChunkCatcherConnector chunks;
+  public TestLogRotate() {
+    chunks = new ChunkCatcherConnector();
+    chunks.start();
+  }
+
+  public void testLogRotate() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
+    ChukwaAgent  agent = new ChukwaAgent();
+    // Remove any adaptor left over from previous run
+    ChukwaConfiguration cc = new ChukwaConfiguration();
+    int portno = cc.getInt("chukwaAgent.control.port", 9093);
+    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
+    cli.removeAll();
+    // sleep for some time to make sure we don't get chunk from existing streams
+    Thread.sleep(5000);
+    File testFile = makeTestFile("/tmp/chukwaLogRotateTest",80);
+    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8" +
+        " lines " + testFile + " 0");
+    assertTrue(adaptorId != -1);
+    System.out.println("getting a chunk...");
+    Chunk c = chunks.waitForAChunk(); 
+    System.out.println("got chunk");
+    while(!c.getDataType().equals("lines")) {
+        c = chunks.waitForAChunk();
+    }
+    assertTrue(c.getSeqID() == testFile.length());	  
+    assertTrue(c.getRecordOffsets().length == 80);
+    int recStart = 0;
+    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
+      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
+      System.out.println("record "+ rec+ " was: " + record);
+      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
+      recStart = c.getRecordOffsets()[rec] +1;
+    }
+    assertTrue(c.getDataType().equals("lines"));
+    testFile = makeTestFile("/tmp/chukwaLogRotateTest",40);
+    c = chunks.waitForAChunk(); 
+    System.out.println("got chunk");
+    while(!c.getDataType().equals("lines")) {
+        c = chunks.waitForAChunk();
+    }
+    //assertTrue(c.getSeqID() == testFile.length());	  
+    assertTrue(c.getRecordOffsets().length == 40);
+    recStart = 0;
+    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
+      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
+      System.out.println("record "+ rec+ " was: " + record);
+      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
+      recStart = c.getRecordOffsets()[rec] +1;
+    }
+    assertTrue(c.getDataType().equals("lines"));
+    agent.stopAdaptor(adaptorId, false);
+    agent.shutdown();
+  }
+
+  private File makeTestFile(String name, int size) throws IOException {
+    File tmpOutput = new File(name);
+    FileOutputStream fos = new FileOutputStream(tmpOutput);
+    
+    PrintWriter pw = new PrintWriter(fos);
+    for(int i = 0; i < size; ++i) {
+      pw.print(i + " ");
+      pw.println("abcdefghijklmnopqrstuvwxyz");
+    }
+    pw.flush();
+    pw.close();
+    return tmpOutput;
+  }
+  
+  public static void main(String[] args) {
+    try {
+      TestLogRotate tests = new TestLogRotate();
+      tests.testLogRotate();
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+  }
+  
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java?rev=726896&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java Mon Dec 15 16:58:24 2008
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
+
+import java.io.*;
+
+import junit.framework.TestCase;
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+
+import java.util.Map;
+import java.util.Iterator;
+import org.apache.hadoop.chukwa.Chunk;
+import org.apache.hadoop.chukwa.datacollection.adaptor.*;
+import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
+import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
+import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
+
+public class TestRawAdaptor extends TestCase {
+  ChunkCatcherConnector chunks;
+  public TestRawAdaptor() {
+    chunks = new ChunkCatcherConnector();
+    chunks.start();
+  }
+  
+  public void testRawAdaptor() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
+
+    ChukwaAgent  agent = new ChukwaAgent();
+    // Remove any adaptor left over from previous run
+    ChukwaConfiguration cc = new ChukwaConfiguration();
+    int portno = cc.getInt("chukwaAgent.control.port", 9093);
+    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
+    cli.removeAll();
+    // sleep for some time to make sure we don't get chunk from existing streams
+    Thread.sleep(5000);
+    File testFile = makeTestFile("/tmp/chukwaRawTest",80);
+    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor" +
+        " raw " + testFile + " 0");
+    assertTrue(adaptorId != -1);
+    Chunk c = chunks.waitForAChunk();
+    while(!c.getDataType().equals("raw")) {
+        c = chunks.waitForAChunk();
+    }
+    assertTrue(c.getDataType().equals("raw"));
+    assertTrue(c.getRecordOffsets().length == 1);
+    assertTrue(c.getSeqID() == testFile.length());
+    agent.stopAdaptor(adaptorId, false);
+    agent.shutdown();
+  }
+
+  private File makeTestFile(String name, int size) throws IOException {
+    File tmpOutput = new File(name);
+    FileOutputStream fos = new FileOutputStream(tmpOutput);
+    
+    PrintWriter pw = new PrintWriter(fos);
+    for(int i = 0; i < size; ++i) {
+      pw.print(i + " ");
+      pw.println("abcdefghijklmnopqrstuvwxyz");
+    }
+    pw.flush();
+    pw.close();
+    return tmpOutput;
+  }
+  
+  public static void main(String[] args) {
+    try {
+      TestRawAdaptor tests = new TestRawAdaptor();
+      tests.testRawAdaptor();
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+  }
+  
+}
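
Note that makeTestFile is now duplicated verbatim in TestFileTailingAdaptors, TestLogRotate, and TestRawAdaptor. Not part of this commit, but one way to factor out that duplication would be a shared base fixture along these lines (FileTailerTestCase is a hypothetical name):

    package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;

    import java.io.*;
    import junit.framework.TestCase;

    // Hypothetical base fixture; each of the three tests would extend this
    // instead of TestCase and drop its private copy of makeTestFile.
    public abstract class FileTailerTestCase extends TestCase {
      protected File makeTestFile(String name, int size) throws IOException {
        File tmpOutput = new File(name);
        PrintWriter pw = new PrintWriter(new FileOutputStream(tmpOutput));
        for (int i = 0; i < size; ++i) {
          pw.println(i + " abcdefghijklmnopqrstuvwxyz");  // one numbered record per line
        }
        pw.flush();
        pw.close();
        return tmpOutput;
      }
    }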


