From: asrabkin@apache.org
To: chukwa-commits@hadoop.apache.org
Reply-To: chukwa-dev@hadoop.apache.org
Date: Wed, 11 Mar 2009 22:39:32 -0000
Subject: svn commit: r752666 [15/16] - in /hadoop/chukwa/trunk: ./ src/java/org/apache/hadoop/chukwa/ src/java/org/apache/hadoop/chukwa/conf/ src/java/org/apache/hadoop/chukwa/database/ src/java/org/apache/hadoop/chukwa/datacollection/ src/java/org/apache/hadoo...
Message-Id: <20090311223944.08B1A2388D9A@eris.apache.org>

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java Wed Mar 11 22:39:26 2009
@@ -17,11 +17,10 @@
  */
 package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
-import java.io.*;
+import java.io.*;
 import junit.framework.TestCase;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
-
 import java.util.Map;
 import java.util.Iterator;
 import org.apache.hadoop.chukwa.Chunk;
@@ -32,13 +31,15 @@
 public class TestFileTailingAdaptors extends TestCase {
   ChunkCatcherConnector chunks;
+
   public TestFileTailingAdaptors() {
     chunks = new ChunkCatcherConnector();
     chunks.start();
   }
 
-  public void testCrSepAdaptor() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
-    ChukwaAgent agent = new ChukwaAgent();
+  public void testCrSepAdaptor() throws IOException, InterruptedException,
+      ChukwaAgent.AlreadyRunningException {
+    ChukwaAgent agent = new ChukwaAgent();
     // Remove any adaptor left over from previous run
     ChukwaConfiguration cc = new ChukwaConfiguration();
     int portno = cc.getInt("chukwaAgent.control.port", 9093);
@@ -46,36 +47,39 @@
     cli.removeAll();
     // sleep for some time to make sure we don't get chunk from existing streams
     Thread.sleep(5000);
-    File testFile = makeTestFile("chukwaCrSepTest",80);
-    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8" +
-        " lines " + testFile + " 0");
+    File testFile = makeTestFile("chukwaCrSepTest", 80);
+    long adaptorId = agent
+        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
+            + " lines " + testFile + " 0");
     assertTrue(adaptorId != -1);
     System.out.println("getting a chunk...");
-    Chunk c = chunks.waitForAChunk();
+    Chunk c = chunks.waitForAChunk();
     System.out.println("got chunk");
-    while(!c.getDataType().equals("lines")) {
-      c = chunks.waitForAChunk();
+    while (!c.getDataType().equals("lines")) {
+      c = chunks.waitForAChunk();
     }
-    assertTrue(c.getSeqID() == testFile.length());
+    assertTrue(c.getSeqID() == testFile.length());
     assertTrue(c.getRecordOffsets().length == 80);
     int recStart = 0;
-    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
-      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
-      System.out.println("record "+ rec+ " was: " + record);
+    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
+      String record = new String(c.getData(), recStart,
+          c.getRecordOffsets()[rec] - recStart + 1);
+      System.out.println("record " + rec + " was: " + record);
       assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
-      recStart = c.getRecordOffsets()[rec] +1;
+      recStart = c.getRecordOffsets()[rec] + 1;
     }
-    assertTrue(c.getDataType().equals("lines"));
+    assertTrue(c.getDataType().equals("lines"));
     agent.stopAdaptor(adaptorId, false);
     agent.shutdown();
   }
 
   private File makeTestFile(String name, int size) throws IOException {
-    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),name);
+    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),
+        name);
     FileOutputStream fos = new FileOutputStream(tmpOutput);
-
+
     PrintWriter pw = new PrintWriter(fos);
-    for(int i = 0; i < size; ++i) {
+    for (int i = 0; i < size; ++i) {
       pw.print(i + " ");
       pw.println("abcdefghijklmnopqrstuvwxyz");
     }
@@ -83,14 +87,14 @@
     pw.close();
     return tmpOutput;
   }
-
+
   public static void main(String[] args) {
     try {
       TestFileTailingAdaptors tests = new TestFileTailingAdaptors();
       tests.testCrSepAdaptor();
-    } catch(Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
    }
  }
-
+
 }
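The command string this test hands to processCommand follows the agent's "add" grammar: adaptor class name, a datatype label, adaptor-specific parameters (here the file to tail), and a byte offset to start from. A minimal sketch of issuing the same command programmatically, using only the ChukwaAgent calls that appear in the test; the log path is a hypothetical placeholder, not part of the commit:

    // Sketch only: drives the agent the same way the test above does.
    ChukwaAgent agent = new ChukwaAgent();
    long id = agent.processCommand(
        "add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
            + " lines /tmp/app.log 0"); // datatype "lines", hypothetical file, offset 0
    if (id == -1)
      System.err.println("agent rejected the add command");
    agent.stopAdaptor(id, false); // same teardown calls the test uses
    agent.shutdown();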
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestLogRotate.java Wed Mar 11 22:39:26 2009
@@ -17,11 +17,10 @@
  */
 package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
-import java.io.*;
+import java.io.*;
 import junit.framework.TestCase;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
-
 import java.util.Map;
 import java.util.Iterator;
 import org.apache.hadoop.chukwa.Chunk;
@@ -32,13 +31,15 @@
 public class TestLogRotate extends TestCase {
   ChunkCatcherConnector chunks;
+
   public TestLogRotate() {
     chunks = new ChunkCatcherConnector();
     chunks.start();
   }
 
-  public void testLogRotate() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
-    ChukwaAgent agent = new ChukwaAgent();
+  public void testLogRotate() throws IOException, InterruptedException,
+      ChukwaAgent.AlreadyRunningException {
+    ChukwaAgent agent = new ChukwaAgent();
     // Remove any adaptor left over from previous run
     ChukwaConfiguration cc = new ChukwaConfiguration();
     int portno = cc.getInt("chukwaAgent.control.port", 9093);
@@ -46,40 +47,43 @@
     cli.removeAll();
     // sleep for some time to make sure we don't get chunk from existing streams
     Thread.sleep(5000);
-    File testFile = makeTestFile("chukwaLogRotateTest",80);
-    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8" +
-        " lines " + testFile + " 0");
+    File testFile = makeTestFile("chukwaLogRotateTest", 80);
+    long adaptorId = agent
+        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
+            + " lines " + testFile + " 0");
     assertTrue(adaptorId != -1);
     System.out.println("getting a chunk...");
-    Chunk c = chunks.waitForAChunk();
+    Chunk c = chunks.waitForAChunk();
     System.out.println("got chunk");
-    while(!c.getDataType().equals("lines")) {
-      c = chunks.waitForAChunk();
+    while (!c.getDataType().equals("lines")) {
+      c = chunks.waitForAChunk();
     }
-    assertTrue(c.getSeqID() == testFile.length());
+    assertTrue(c.getSeqID() == testFile.length());
     assertTrue(c.getRecordOffsets().length == 80);
     int recStart = 0;
-    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
-      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
-      System.out.println("record "+ rec+ " was: " + record);
+    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
+      String record = new String(c.getData(), recStart,
+          c.getRecordOffsets()[rec] - recStart + 1);
+      System.out.println("record " + rec + " was: " + record);
       assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
-      recStart = c.getRecordOffsets()[rec] +1;
+      recStart = c.getRecordOffsets()[rec] + 1;
     }
     assertTrue(c.getDataType().equals("lines"));
-    testFile = makeTestFile("chukwaLogRotateTest",40);
-    c = chunks.waitForAChunk();
+    testFile = makeTestFile("chukwaLogRotateTest", 40);
+    c = chunks.waitForAChunk();
     System.out.println("got chunk");
-    while(!c.getDataType().equals("lines")) {
-      c = chunks.waitForAChunk();
+    while (!c.getDataType().equals("lines")) {
+      c = chunks.waitForAChunk();
     }
-    //assertTrue(c.getSeqID() == testFile.length());
+    // assertTrue(c.getSeqID() == testFile.length());
     assertTrue(c.getRecordOffsets().length == 40);
     recStart = 0;
-    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
-      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
-      System.out.println("record "+ rec+ " was: " + record);
+    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
+      String record = new String(c.getData(), recStart,
+          c.getRecordOffsets()[rec] - recStart + 1);
+      System.out.println("record " + rec + " was: " + record);
       assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
-      recStart = c.getRecordOffsets()[rec] +1;
+      recStart = c.getRecordOffsets()[rec] + 1;
     }
     assertTrue(c.getDataType().equals("lines"));
     agent.stopAdaptor(adaptorId, false);
@@ -87,11 +91,12 @@
   }
 
   private File makeTestFile(String name, int size) throws IOException {
-    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),name);
+    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),
+        name);
     FileOutputStream fos = new FileOutputStream(tmpOutput);
-
+
     PrintWriter pw = new PrintWriter(fos);
-    for(int i = 0; i < size; ++i) {
+    for (int i = 0; i < size; ++i) {
       pw.print(i + " ");
       pw.println("abcdefghijklmnopqrstuvwxyz");
     }
@@ -99,14 +104,14 @@
     pw.close();
     return tmpOutput;
   }
-
+
   public static void main(String[] args) {
     try {
       TestLogRotate tests = new TestLogRotate();
       tests.testLogRotate();
-    } catch(Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
     }
   }
-
+
 }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestRawAdaptor.java Wed Mar 11 22:39:26 2009
@@ -17,11 +17,10 @@
  */
 package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
-import java.io.*;
+import java.io.*;
 import junit.framework.TestCase;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
-
 import java.util.Map;
 import java.util.Iterator;
 import org.apache.hadoop.chukwa.Chunk;
@@ -32,14 +31,16 @@
 public class TestRawAdaptor extends TestCase {
   ChunkCatcherConnector chunks;
+
   public TestRawAdaptor() {
     chunks = new ChunkCatcherConnector();
     chunks.start();
   }
-
-  public void testRawAdaptor() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
-    ChukwaAgent agent = new ChukwaAgent();
+  public void testRawAdaptor() throws IOException, InterruptedException,
+      ChukwaAgent.AlreadyRunningException {
+
+    ChukwaAgent agent = new ChukwaAgent();
     // Remove any adaptor left over from previous run
     ChukwaConfiguration cc = new ChukwaConfiguration();
     int portno = cc.getInt("chukwaAgent.control.port", 9093);
@@ -47,13 +48,14 @@
     cli.removeAll();
     // sleep for some time to make sure we don't get chunk from existing streams
     Thread.sleep(5000);
-    File testFile = makeTestFile("chukwaRawTest",80);
-    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor" +
-        " raw " + testFile + " 0");
+    File testFile = makeTestFile("chukwaRawTest", 80);
+    long adaptorId = agent
+        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor"
+            + " raw " + testFile + " 0");
     assertTrue(adaptorId != -1);
     Chunk c = chunks.waitForAChunk();
-    while(!c.getDataType().equals("raw")) {
-      c = chunks.waitForAChunk();
+    while (!c.getDataType().equals("raw")) {
+      c = chunks.waitForAChunk();
     }
     assertTrue(c.getDataType().equals("raw"));
     assertTrue(c.getRecordOffsets().length == 1);
@@ -63,11 +65,12 @@
   }
 
   private File makeTestFile(String name, int size) throws IOException {
-    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),name);
+    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),
+        name);
     FileOutputStream fos = new FileOutputStream(tmpOutput);
-
+
     PrintWriter pw = new PrintWriter(fos);
-    for(int i = 0; i < size; ++i) {
+    for (int i = 0; i < size; ++i) {
      pw.print(i + " ");
      pw.println("abcdefghijklmnopqrstuvwxyz");
    }
@@ -75,14 +78,14 @@
     pw.close();
     return tmpOutput;
   }
-
+
   public static void main(String[] args) {
     try {
       TestRawAdaptor tests = new TestRawAdaptor();
       tests.testRawAdaptor();
-    } catch(Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
     }
   }
-
+
 }
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestStartAtOffset.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestStartAtOffset.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestStartAtOffset.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestStartAtOffset.java Wed Mar 11 22:39:26 2009
@@ -17,30 +17,31 @@
  */
 package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
+
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintWriter;
-
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.datacollection.adaptor.*;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
 import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
-
 import junit.framework.TestCase;
 
 public class TestStartAtOffset extends TestCase {
-
+
   ChunkCatcherConnector chunks;
+
   public TestStartAtOffset() {
     chunks = new ChunkCatcherConnector();
     chunks.start();
   }
-
-  public void testStartAtOffset() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
-    ChukwaAgent agent = new ChukwaAgent();
+
+  public void testStartAtOffset() throws IOException, InterruptedException,
+      ChukwaAgent.AlreadyRunningException {
+    ChukwaAgent agent = new ChukwaAgent();
     // Remove any adaptor left over from previous run
     ChukwaConfiguration cc = new ChukwaConfiguration();
     int portno = cc.getInt("chukwaAgent.control.port", 9093);
@@ -49,33 +50,36 @@
     // sleep for some time to make sure we don't get chunk from existing streams
     Thread.sleep(5000);
     File testFile = makeTestFile();
-    int startOffset = 0; // skip first line
-    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 " +
-        "lines "+ startOffset+ " " + testFile + " " + startOffset);
+    int startOffset = 0; // skip first line
+    long adaptorId = agent
+        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 "
+            + "lines " + startOffset + " " + testFile + " " + startOffset);
     assertTrue(adaptorId != -1);
     System.out.println("getting a chunk...");
-    Chunk c = chunks.waitForAChunk();
+    Chunk c = chunks.waitForAChunk();
     System.out.println("got chunk");
-    while(!c.getDataType().equals("lines")) {
-      c = chunks.waitForAChunk();
+    while (!c.getDataType().equals("lines")) {
+      c = chunks.waitForAChunk();
     }
-    assertTrue(c.getSeqID() == testFile.length() + startOffset);
-    System.out.println("RecordOffsets length:"+c.getRecordOffsets().length);
+    assertTrue(c.getSeqID() == testFile.length() + startOffset);
+    System.out.println("RecordOffsets length:" + c.getRecordOffsets().length);
     assertTrue(c.getRecordOffsets().length == 80); // 80 lines in the file.
     int recStart = 0;
-    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
-      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
-      System.out.println("record "+ rec+ " was: " + record);
+    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
+      String record = new String(c.getData(), recStart,
+          c.getRecordOffsets()[rec] - recStart + 1);
+      System.out.println("record " + rec + " was: " + record);
       assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
-      recStart = c.getRecordOffsets()[rec] +1;
+      recStart = c.getRecordOffsets()[rec] + 1;
     }
     assertTrue(c.getDataType().equals("lines"));
     agent.stopAdaptor(adaptorId, false);
     agent.shutdown();
   }
-
-  public void testStartAfterOffset() throws IOException, InterruptedException, ChukwaAgent.AlreadyRunningException {
-    ChukwaAgent agent = new ChukwaAgent();
+
+  public void testStartAfterOffset() throws IOException, InterruptedException,
+      ChukwaAgent.AlreadyRunningException {
+    ChukwaAgent agent = new ChukwaAgent();
     // Remove any adaptor left over from previous run
     ChukwaConfiguration cc = new ChukwaConfiguration();
     int portno = cc.getInt("chukwaAgent.control.port", 9093);
@@ -85,36 +89,45 @@
     Thread.sleep(5000);
     File testFile = makeTestFile();
     int startOffset = 0;
-    long adaptorId = agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 " +
-        "lines "+ startOffset+ " " + testFile + " " + (startOffset + 29) );
+    long adaptorId = agent
+        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 "
+            + "lines "
+            + startOffset
+            + " "
+            + testFile
+            + " "
+            + (startOffset + 29));
     assertTrue(adaptorId != -1);
     System.out.println("getting a chunk...");
-    Chunk c = chunks.waitForAChunk();
+    Chunk c = chunks.waitForAChunk();
     System.out.println("got chunk");
-    while(!c.getDataType().equals("lines")) {
-      c = chunks.waitForAChunk();
+    while (!c.getDataType().equals("lines")) {
+      c = chunks.waitForAChunk();
     }
-    assertTrue(c.getSeqID() == testFile.length() + startOffset);
-
-    assertTrue(c.getRecordOffsets().length == 79);//80 lines in test file, minus the one we skipped
+    assertTrue(c.getSeqID() == testFile.length() + startOffset);
+
+    assertTrue(c.getRecordOffsets().length == 79);// 80 lines in test file,
+                                                  // minus the one we skipped
     int recStart = 0;
-    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
-      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
-      System.out.println("record "+ rec+ " was: " + record);
-      assertTrue(record.equals((rec+1) + " abcdefghijklmnopqrstuvwxyz\n"));
-      recStart = c.getRecordOffsets()[rec] +1;
+    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
+      String record = new String(c.getData(), recStart,
+          c.getRecordOffsets()[rec] - recStart + 1);
+      System.out.println("record " + rec + " was: " + record);
+      assertTrue(record.equals((rec + 1) + " abcdefghijklmnopqrstuvwxyz\n"));
+      recStart = c.getRecordOffsets()[rec] + 1;
     }
-    assertTrue(c.getDataType().equals("lines"));
+    assertTrue(c.getDataType().equals("lines"));
     agent.stopAdaptor(adaptorId, false);
     agent.shutdown();
   }
-
+
   private File makeTestFile() throws IOException {
-    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"), "chukwaTest");
+    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),
+        "chukwaTest");
     FileOutputStream fos = new FileOutputStream(tmpOutput);
-
+
     PrintWriter pw = new PrintWriter(fos);
-    for(int i = 0; i < 80; ++i) {
+    for (int i = 0; i < 80; ++i) {
      pw.print(i + " ");
      pw.println("abcdefghijklmnopqrstuvwxyz");
    }
@@ -122,6 +135,5 @@
     pw.close();
     return tmpOutput;
   }
-
-
+
 }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java Wed Mar 11 22:39:26 2009
@@ -17,43 +17,43 @@
  */
 package org.apache.hadoop.chukwa.datacollection.agent;
-import java.util.ArrayList;
+import java.util.ArrayList;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
 import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
-
 import junit.framework.TestCase;
 
 public class TestAgent extends TestCase {
-
   public void testStopAndStart() {
     try {
       ChukwaAgent agent = new ChukwaAgent();
       ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
       conn.start();
-
+
       ChukwaConfiguration cc = new ChukwaConfiguration();
       int portno = cc.getInt("chukwaAgent.control.port", 9093);
       ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
-
-      for(int i=1; i < 20; ++i) {
-        long adaptorId = cli.add("org.apache.hadoop.chukwa.util.ConstRateAdaptor", "raw" + i, "2000" + i, 0);
+
+      for (int i = 1; i < 20; ++i) {
+        long adaptorId = cli.add(
+            "org.apache.hadoop.chukwa.util.ConstRateAdaptor", "raw" + i, "2000"
+                + i, 0);
         assertTrue(adaptorId != -1);
-        Thread.sleep(2000);
+        Thread.sleep(2000);
         cli.removeAll();
       }
       agent.shutdown();
       conn.shutdown();
-    } catch(Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
       fail(e.toString());
     }
   }
-
+
   public void testMultiStopAndStart() {
     try {
@@ -61,27 +61,29 @@
       ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
       conn.start();
       int count = agent.adaptorCount();
-      for(int trial=0; trial < 20; ++trial) {
+      for (int trial = 0; trial < 20; ++trial) {
         ArrayList runningAdaptors = new ArrayList();
-
-        for(int i = 1; i < 7; ++i) {
-          long l = agent.processCommand("add org.apache.hadoop.chukwa.util.ConstRateAdaptor raw"+i+ " 2000"+i+" 0");
+
+        for (int i = 1; i < 7; ++i) {
+          long l = agent
+              .processCommand("add org.apache.hadoop.chukwa.util.ConstRateAdaptor raw"
+                  + i + " 2000" + i + " 0");
           assertTrue(l != -1);
           runningAdaptors.add(l);
         }
-        Thread.sleep(1000);
-        for(Long l: runningAdaptors)
+        Thread.sleep(1000);
+        for (Long l : runningAdaptors)
           agent.stopAdaptor(l, false);
         Thread.sleep(5000);
         assertTrue(agent.adaptorCount() == count);
       }
       agent.shutdown();
-    } catch(Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
       fail(e.toString());
     }
   }
-
+
   public void testLogRotate() {
   }
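testStopAndStart drives the agent through the socket-based ChukwaAgentController rather than processCommand; its add() takes the adaptor class, an application type, a parameter string, and a start offset. A sketch under the same assumptions, reusing only calls that appear in the test (port 9093 is the default read from chukwaAgent.control.port):

    // Sketch only, mirroring the controller calls in testStopAndStart.
    ChukwaAgentController cli = new ChukwaAgentController("localhost", 9093);
    long adaptorId = cli.add("org.apache.hadoop.chukwa.util.ConstRateAdaptor",
        "raw1", "20001", 0); // app type, params, start offset as in the test
    // ... let the adaptor emit synthetic data for a while ...
    cli.removeAll(); // deregister every adaptor, as the test loop does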
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgentConfig.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgentConfig.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgentConfig.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgentConfig.java Wed Mar 11 22:39:26 2009
@@ -1,16 +1,15 @@
 package org.apache.hadoop.chukwa.datacollection.agent;
-import java.io.*;
+import java.io.*;
 import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
 import org.apache.hadoop.conf.Configuration;
-
 import junit.framework.TestCase;
 
 public class TestAgentConfig extends TestCase {
   public void testInitAdaptors_vs_Checkpoint() {
     try {
-      //create two target files, foo and bar
+      // create two target files, foo and bar
       File foo = File.createTempFile("foo", "test");
       foo.deleteOnExit();
       PrintStream ps = new PrintStream(new FileOutputStream(foo));
@@ -22,111 +21,117 @@
       ps = new PrintStream(new FileOutputStream(bar));
       ps.println("bar");
       ps.close();
-
-      //initially, read foo
+
+      // initially, read foo
       File initialAdaptors = File.createTempFile("initial", "adaptors");
       initialAdaptors.deleteOnExit();
       ps = new PrintStream(new FileOutputStream(initialAdaptors));
-      ps.println("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 raw 0 "
-          + foo.getAbsolutePath() +" 0 ");
+      ps
+          .println("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 raw 0 "
+              + foo.getAbsolutePath() + " 0 ");
       ps.close();
-
+
       Configuration conf = new Configuration();
-      conf.set("chukwaAgent.initial_adaptors", initialAdaptors.getAbsolutePath());
+      conf.set("chukwaAgent.initial_adaptors", initialAdaptors
+          .getAbsolutePath());
       File checkpointDir = File.createTempFile("chukwatest", "checkpoint");
       checkpointDir.delete();
       checkpointDir.mkdir();
       checkpointDir.deleteOnExit();
       conf.set("chukwaAgent.checkpoint.dir", checkpointDir.getAbsolutePath());
-
+
       ChukwaAgent agent = new ChukwaAgent(conf);
       ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
       conn.start();
-      assertEquals(1, agent.adaptorCount());//check that we processed initial adaptors
+      assertEquals(1, agent.adaptorCount());// check that we processed initial
+                                            // adaptors
       assertNotNull(agent.getAdaptorList().get(1L));
       assertTrue(agent.getAdaptorList().get(1L).getStreamName().contains("foo"));
-
-      System.out.println("---------------------done with first run, now stopping");
+
+      System.out
+          .println("---------------------done with first run, now stopping");
       agent.shutdown();
       assertEquals(0, agent.adaptorCount());
-      //at this point, there should be a checkpoint file with a tailer reading foo.
-      //we're going to rewrite initial adaptors to read bar; but after reboot we should
-      //still only be looking at foo.
-      ps = new PrintStream(new FileOutputStream(initialAdaptors, false));//overwrite
-      ps.println("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 raw 0 "
-          + bar.getAbsolutePath() +" 0 ");
+      // at this point, there should be a checkpoint file with a tailer reading
+      // foo.
+      // we're going to rewrite initial adaptors to read bar; but after reboot
+      // we should
+      // still only be looking at foo.
+      ps = new PrintStream(new FileOutputStream(initialAdaptors, false));// overwrite
+      ps
+          .println("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 raw 0 "
+              + bar.getAbsolutePath() + " 0 ");
       ps.close();
       System.out.println("---------------------restarting");
       agent = new ChukwaAgent(conf);
       conn = new ConsoleOutConnector(agent, true);
       conn.start();
-      assertEquals(1, agent.adaptorCount());//check that we processed initial adaptors
+      assertEquals(1, agent.adaptorCount());// check that we processed initial
+                                            // adaptors
       assertNotNull(agent.getAdaptorList().get(1L));
       assertTrue(agent.getAdaptorList().get(1L).getStreamName().contains("foo"));
       agent.shutdown();
       System.out.println("---------------------done");
-
-
-    } catch(Exception e) {
+
+    } catch (Exception e) {
       e.printStackTrace();
       fail(e.toString());
     }
   }
-
-
+
   public void testNoCheckpoints() {
     try {
       String tmpdir = System.getProperty("test.build.data", "/tmp");
-      File NONCE_DIR = new File(tmpdir,"/test_chukwa_checkpoints");
-      if(NONCE_DIR.exists()) {
-        for(File f: NONCE_DIR.listFiles())
+      File NONCE_DIR = new File(tmpdir, "/test_chukwa_checkpoints");
+      if (NONCE_DIR.exists()) {
+        for (File f : NONCE_DIR.listFiles())
           f.delete();
         NONCE_DIR.delete();
       }
-//      assertFalse(NONCE_DIR.exists());
+      // assertFalse(NONCE_DIR.exists());
       Configuration conf = new Configuration();
       conf.set("chukwaAgent.checkpoint.dir", NONCE_DIR.getAbsolutePath());
       conf.setBoolean("chukwaAgent.checkpoint.enabled", true);
       conf.setInt("chukwaAgent.control.port", 0);
-
+
       System.out.println("\n\n===checkpoints enabled, dir does not exist:");
       ChukwaAgent agent = new ChukwaAgent(conf);
       assertEquals(0, agent.getAdaptorList().size());
       agent.shutdown();
       assertTrue(NONCE_DIR.exists());
-      for(File f: NONCE_DIR.listFiles())
+      for (File f : NONCE_DIR.listFiles())
         f.delete();
 
-      System.out.println("\n\n===checkpoints enabled, dir exists but is empty:");
+      System.out
+          .println("\n\n===checkpoints enabled, dir exists but is empty:");
       agent = new ChukwaAgent(conf);
       assertEquals(0, agent.getAdaptorList().size());
       agent.shutdown();
-      for(File f: NONCE_DIR.listFiles())
+      for (File f : NONCE_DIR.listFiles())
         f.delete();
-
-      System.out.println("\n\n===checkpoints enabled, dir exists with zero-length file:");
+
+      System.out
          .println("\n\n===checkpoints enabled, dir exists with zero-length file:");
      (new File(NONCE_DIR, "chukwa_checkpoint_0")).createNewFile();
      agent = new ChukwaAgent(conf);
      assertEquals(0, agent.getAdaptorList().size());
-      agent.processCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor testdata  0");
+      agent
+          .processCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor testdata  0");
       agent.shutdown();
       assertTrue(new File(NONCE_DIR, "chukwa_checkpoint_1").exists());
 
-      System.out.println("\n\n===checkpoints enabled, dir exists with valid checkpoint");
+      System.out
          .println("\n\n===checkpoints enabled, dir exists with valid checkpoint");
      agent = new ChukwaAgent(conf);
      assertEquals(1, agent.getAdaptorList().size());
      agent.shutdown();
-      //checkpoint # increments by one on boot and reload
+      // checkpoint # increments by one on boot and reload
      assertTrue(new File(NONCE_DIR, "chukwa_checkpoint_2").exists());
-
-    }
-    catch(Exception e) {
+    } catch (Exception e) {
      fail(e.toString());
    }
  }
-
-
-
 }
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestCmd.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestCmd.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestCmd.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestCmd.java Wed Mar 11 22:39:26 2009
@@ -18,86 +18,98 @@
 package org.apache.hadoop.chukwa.datacollection.agent;
+
 import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;
 import org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent.AlreadyRunningException;
 import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
-
 import junit.framework.TestCase;
 
 public class TestCmd extends TestCase {
 
-  public void testAddCmdWithParam()
-  {
+  public void testAddCmdWithParam() {
     ChukwaAgent agent;
     try {
       agent = new ChukwaAgent();
       ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
       conn.start();
-      long l = agent.processCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor chukwaTestAdaptorType 0 my param1 param2 /var/log/messages 114027");
+      long l = agent
+          .processCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor chukwaTestAdaptorType 0 my param1 param2 /var/log/messages 114027");
       assertTrue(l != -1);
       Adaptor adaptor = agent.getAdaptorList().get(l);
       ChukwaTestAdaptor chukwaTestAdaptor = (ChukwaTestAdaptor) adaptor;
-      assertTrue("error in type","chukwaTestAdaptorType".intern() == chukwaTestAdaptor.getType().intern());
-      assertTrue("error in param", "0 my param1 param2 /var/log/messages".intern() == chukwaTestAdaptor.getParams().intern());
-      assertTrue("error in startOffset",114027l == chukwaTestAdaptor.getStartOffset());
+      assertTrue("error in type",
+          "chukwaTestAdaptorType".intern() == chukwaTestAdaptor.getType()
+              .intern());
+      assertTrue("error in param", "0 my param1 param2 /var/log/messages"
+          .intern() == chukwaTestAdaptor.getParams().intern());
+      assertTrue("error in startOffset", 114027l == chukwaTestAdaptor
+          .getStartOffset());
       agent.stopAdaptor(l, false);
       agent.shutdown();
       Thread.sleep(2000);
-    } catch(InterruptedException e) {
-
+    } catch (InterruptedException e) {
+
     } catch (AlreadyRunningException e) {
       e.printStackTrace();
       fail(e.toString());
     }
   }
-
-  public void testAddCmdWithoutParam1()
-  {
+
+  public void testAddCmdWithoutParam1() {
     ChukwaAgent agent;
     try {
       agent = new ChukwaAgent();
       ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
       conn.start();
-      long l = agent.processCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor chukwaTestAdaptorType  114027");
+      long l = agent
+          .processCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor chukwaTestAdaptorType  114027");
       assertTrue(l != -1);
       Adaptor adaptor = agent.getAdaptorList().get(l);
       ChukwaTestAdaptor chukwaTestAdaptor = (ChukwaTestAdaptor) adaptor;
-      assertTrue("error in type","chukwaTestAdaptorType".intern() == chukwaTestAdaptor.getType().intern());
-      assertTrue("error in param", "".intern() == chukwaTestAdaptor.getParams().intern());
-      assertTrue("error in startOffset",114027l == chukwaTestAdaptor.getStartOffset());
+      assertTrue("error in type",
+          "chukwaTestAdaptorType".intern() == chukwaTestAdaptor.getType()
              .intern());
+      assertTrue("error in param", "".intern() == chukwaTestAdaptor.getParams()
+          .intern());
+      assertTrue("error in startOffset", 114027l == chukwaTestAdaptor
+          .getStartOffset());
       agent.stopAdaptor(l, false);
       agent.shutdown();
       Thread.sleep(2000);
-    } catch(InterruptedException e) {
-
+    } catch (InterruptedException e) {
+
     } catch (AlreadyRunningException e) {
       e.printStackTrace();
       fail(e.toString());
     }
   }
-
-  public void testAddCmdWithoutParam2()
-  {
+
+  public void testAddCmdWithoutParam2() {
    ChukwaAgent agent;
    try {
      agent = new ChukwaAgent();
      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
      conn.start();
-      long l = agent.processCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor"
-          + " chukwaTestAdaptorType 0  114027");
+      long l = agent
+          .processCommand("ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor"
              + " chukwaTestAdaptorType 0  114027");
      assertTrue(l != -1);
      Adaptor adaptor = agent.getAdaptorList().get(l);
      ChukwaTestAdaptor chukwaTestAdaptor = (ChukwaTestAdaptor) adaptor;
-      assertTrue("error in type","chukwaTestAdaptorType".intern() == chukwaTestAdaptor.getType().intern());
-      assertTrue("error in param", "0".intern() == chukwaTestAdaptor.getParams().intern());
-      assertTrue("error in startOffset",114027l == chukwaTestAdaptor.getStartOffset());
+      assertTrue("error in type",
+          "chukwaTestAdaptorType".intern() == chukwaTestAdaptor.getType()
              .intern());
+      assertTrue("error in param", "0".intern() == chukwaTestAdaptor
+          .getParams().intern());
+      assertTrue("error in startOffset", 114027l == chukwaTestAdaptor
+          .getStartOffset());
      agent.stopAdaptor(l, false);
      agent.shutdown();
      Thread.sleep(2000);
-    } catch(InterruptedException e) {
-
+    } catch (InterruptedException e) {
+
    } catch (AlreadyRunningException e) {
      e.printStackTrace();
      fail(e.toString());
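Taken together, the assertions in these three tests pin down how the agent splits an ADD line: the token after the adaptor class is the adaptor type, the final token is the start offset, and everything in between (possibly empty) is the parameter string. A worked reading of the command from testAddCmdWithParam, stated only from the assertions above:

    // ADD org.apache.hadoop.chukwa.datacollection.adaptor.ChukwaTestAdaptor
    //     chukwaTestAdaptorType 0 my param1 param2 /var/log/messages 114027
    // parses as:
    //   type        = "chukwaTestAdaptorType"
    //   params      = "0 my param1 param2 /var/log/messages"
    //   startOffset = 114027L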
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/collector/CaptureWriter.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/collector/CaptureWriter.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/collector/CaptureWriter.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/collector/CaptureWriter.java Wed Mar 11 22:39:26 2009
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.chukwa.datacollection.collector;
+
 import java.util.ArrayList;
 import java.util.List;
-
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.WriterException;
@@ -31,17 +31,18 @@
 
   @Override
   public void add(List chunks) throws WriterException {
-    synchronized(outputs) {
-      for(Chunk c: chunks)
-        outputs.add(c);
+    synchronized (outputs) {
+      for (Chunk c : chunks)
+        outputs.add(c);
     }
   }
 
   @Override
-  public void close() throws WriterException { }
+  public void close() throws WriterException {
+  }
 
   @Override
-  public void init(Configuration c) throws WriterException { }
-
-}
+  public void init(Configuration c) throws WriterException {
+  }
+}

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/collector/CollectorTest.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/collector/CollectorTest.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/collector/CollectorTest.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/collector/CollectorTest.java Wed Mar 11 22:39:26 2009
@@ -18,6 +18,7 @@
 package org.apache.hadoop.chukwa.datacollection.collector;
+
 import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
@@ -25,47 +26,49 @@
 import org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector;
 import org.apache.hadoop.chukwa.datacollection.sender.*;
 import org.apache.hadoop.chukwa.datacollection.writer.*;
-
 import java.util.*;
-
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.servlet.Context;
 import org.mortbay.jetty.servlet.ServletHolder;
 
 public class CollectorTest extends TestCase {
-
-
+
   public void testCollector() {
     try {
       Configuration conf = new Configuration();
       conf.set("chukwaCollector.chunkSuppressBufferSize", "10");
-      conf.set("chukwaCollector.pipeline",
-          "org.apache.hadoop.chukwa.datacollection.writer.Dedup,"//note comma
-          + "org.apache.hadoop.chukwa.datacollection.collector.CaptureWriter");
-      conf.set("chukwaCollector.writerClass", PipelineStageWriter.class.getCanonicalName());
+      conf
+          .set(
+              "chukwaCollector.pipeline",
+              "org.apache.hadoop.chukwa.datacollection.writer.Dedup,"// note
+                                                                     // comma
+                  + "org.apache.hadoop.chukwa.datacollection.collector.CaptureWriter");
+      conf.set("chukwaCollector.writerClass", PipelineStageWriter.class
+          .getCanonicalName());
       ChukwaHttpSender sender = new ChukwaHttpSender(conf);
       ArrayList collectorList = new ArrayList();
       collectorList.add("http://localhost:9990/chukwa");
       sender.setCollectors(new RetryListOfCollectors(collectorList, 50));
       Server server = new Server(9990);
-      Context root = new Context(server,"/",Context.SESSIONS);
-
+      Context root = new Context(server, "/", Context.SESSIONS);
+
       root.addServlet(new ServletHolder(new ServletCollector(conf)), "/*");
       server.start();
       server.setStopAtShutdown(false);
       Thread.sleep(1000);
-
-      Chunk c = new ChunkImpl("data", "stream", 0, "testing -- this should appear once".getBytes(), null);
+
+      Chunk c = new ChunkImpl("data", "stream", 0,
+          "testing -- this should appear once".getBytes(), null);
       ArrayList toSend = new ArrayList();
       toSend.add(c);
       toSend.add(c);
       sender.send(toSend);
       Thread.sleep(1000);
       assertEquals(1, CaptureWriter.outputs.size());
-    } catch(Exception e) {
+    } catch (Exception e) {
       fail(e.toString());
     }
-
+
   }
 }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/connector/ChunkCatcherConnector.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/connector/ChunkCatcherConnector.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/connector/ChunkCatcherConnector.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/connector/ChunkCatcherConnector.java Wed Mar 11 22:39:26 2009
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.chukwa.datacollection.connector;
+
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.datacollection.*;
 import java.util.ArrayList;
@@ -24,10 +25,11 @@
 public class ChunkCatcherConnector implements Connector {
   ChunkQueue eq;
-  public void start(){
+
+  public void start() {
     eq = DataFactory.getInstance().getEventQueue();
   }
-
+
   public Chunk waitForAChunk() throws InterruptedException {
     ArrayList chunks = new ArrayList();
     eq.collect(chunks, 1);
@@ -38,9 +40,8 @@
   }
 
   @Override
-  public void reloadConfiguration()
-  {
-    System.out.println("reloadConfiguration");
+  public void reloadConfiguration() {
+    System.out.println("reloadConfiguration");
   }
 }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/connector/TestFailedCollector.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/connector/TestFailedCollector.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/connector/TestFailedCollector.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/connector/TestFailedCollector.java Wed Mar 11 22:39:26 2009
@@ -18,43 +18,41 @@
 package org.apache.hadoop.chukwa.datacollection.connector;
-import java.io.File;
+import java.io.File;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.datacollection.TempFileUtil;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
 import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
-
 import junit.framework.TestCase;
 
 public class TestFailedCollector extends TestCase {
-
-  public void testFailedCollector()
-  {
+
+  public void testFailedCollector() {
     try {
       ChukwaAgent agent = new ChukwaAgent();
-      boolean failed=false;
-      HttpConnector connector = new HttpConnector(agent, "http://localhost:1234/chukwa");
+      boolean failed = false;
+      HttpConnector connector = new HttpConnector(agent,
+          "http://localhost:1234/chukwa");
       connector.start();
-
+
       ChukwaConfiguration cc = new ChukwaConfiguration();
       int portno = cc.getInt("chukwaAgent.control.port", 9093);
       ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
-
+
       File tmpOutput = TempFileUtil.makeBinary(2000);
-
+
       cli.addFile("unknown", tmpOutput.getAbsolutePath());
       System.out.println("have " + agent.adaptorCount() + " running adaptors");
       cli.removeFile("unknown", tmpOutput.getAbsolutePath());
-
-
+
       tmpOutput.delete();
       assertFalse(failed);
       System.out.println("done");
       agent.shutdown();
       connector.shutdown();
-    } catch(Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
     }
   }
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java Wed Mar 11 22:39:26 2009
@@ -17,15 +17,14 @@
  */
 package org.apache.hadoop.chukwa.datacollection.controller;
+
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.chukwa.datacollection.connector.Connector;
 import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
 import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
-
 import java.io.IOException;
 import java.util.Map;
-
 import junit.framework.TestCase;
 
 public class TestAgentClient extends TestCase {
@@ -33,39 +32,42 @@
   ChukwaAgent agent;
   ChukwaAgentController c;
   Connector httpConnector;
-  //consoleConnector = new ConsoleOutConnector(agent);
-
+
+  // consoleConnector = new ConsoleOutConnector(agent);
+
   protected void setUp() throws ChukwaAgent.AlreadyRunningException {
     config = new ChukwaConfiguration();
     agent = new ChukwaAgent();
     c = new ChukwaAgentController();
-    httpConnector = new HttpConnector(agent); //use default source for list of collectors (i.e. conf/connectors)
+    httpConnector = new HttpConnector(agent); // use default source for list of
+                                              // collectors (i.e.
+                                              // conf/connectors)
     httpConnector.start();
-//    assertTrue(Integer.parseInt(config.get("chukwaAgent.control.port")) == agent.getControlSock().getPortNumber());
+    // assertTrue(Integer.parseInt(config.get("chukwaAgent.control.port")) ==
+    // agent.getControlSock().getPortNumber());
   }
-
-  protected void tearDown(){
+
+  protected void tearDown() {
     System.out.println("in tearDown()");
-    ((HttpConnector)httpConnector).shutdown();
+    ((HttpConnector) httpConnector).shutdown();
   }
-
+
   public void testAddFile() {
     String appType = "junit_addFileTest";
     String params = "testFile";
-    try{
-      //add the fileTailer to the agent using the client
+    try {
+      // add the fileTailer to the agent using the client
       System.out.println("Adding adaptor with filename: " + params);
       long adaptorID = c.addFile(appType, params);
       System.out.println("Successfully added adaptor, id is:" + adaptorID);
-
-      //do a list on the agent to see if the adaptor has been added for this file
+
+      // do a list on the agent to see if the adaptor has been added for this
+      // file
       Map listResult = c.list();
       assertTrue(listResult.containsKey(adaptorID));
-    }
-    catch(IOException e)
-    {
+    } catch (IOException e) {
       e.printStackTrace();
     }
   }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java Wed Mar 11 22:39:26 2009
@@ -17,15 +17,14 @@
  */
 package org.apache.hadoop.chukwa.datacollection.sender;
+
 import junit.framework.TestCase;
 import java.util.*;
-
 import org.apache.hadoop.chukwa.datacollection.sender.RetryListOfCollectors;
 
 public class TestRetryListOfCollectors extends TestCase {
 
-  public void testRetryList()
-  {
+  public void testRetryList() {
     List hosts = new ArrayList();
     hosts.add("host1");
     hosts.add("host2");
@@ -33,20 +32,20 @@
     hosts.add("host4");
     RetryListOfCollectors rloc = new RetryListOfCollectors(hosts, 2000);
     assertEquals(hosts.size(), rloc.total());
-
-    for(int i = 0; i < hosts.size(); ++i) {
+
+    for (int i = 0; i < hosts.size(); ++i) {
       assertTrue(rloc.hasNext());
-      String s = rloc.next();
+      String s = rloc.next();
       assertTrue(s != null);
       System.out.println(s);
     }
-
-    if(rloc.hasNext()) {
+
+    if (rloc.hasNext()) {
       String s = rloc.next();
       System.out.println("saw unexpected collector " + s);
       fail();
     }
-
+
   }
 }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/writer/TestClientAck.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/writer/TestClientAck.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/writer/TestClientAck.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/writer/TestClientAck.java Wed Mar 11 22:39:26 2009
@@ -1,23 +1,22 @@
 package org.apache.hadoop.chukwa.datacollection.writer;
+
 import junit.framework.Assert;
 import junit.framework.TestCase;
 import org.apache.hadoop.chukwa.datacollection.writer.ClientAck;
 
-public class TestClientAck extends TestCase
-{
+public class TestClientAck extends TestCase {
 
-  public void testWait4AckTimeOut()
-  {
+  public void testWait4AckTimeOut() {
     ClientAck clientAck = new ClientAck();
     long startDate = System.currentTimeMillis();
     clientAck.wait4Ack();
     long now = System.currentTimeMillis();
-    long duration = now - startDate ;
+    long duration = now - startDate;
     duration = duration - clientAck.getTimeOut();
-
-    Assert.assertTrue("should not wait nore than "
-        + clientAck.getTimeOut() + " + 7sec" , duration < 7000);
+
+    Assert.assertTrue("should not wait nore than " + clientAck.getTimeOut()
+        + " + 7sec", duration < 7000);
     Assert.assertEquals(ClientAck.KO_LOCK, clientAck.getStatus());
   }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/TestDemux.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/TestDemux.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/TestDemux.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/TestDemux.java Wed Mar 11 22:39:26 2009
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.chukwa.extraction.demux;
+
 import java.io.IOException;
 import java.util.Calendar;
-
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.mapred.JobConf;
@@ -29,37 +29,41 @@
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
-
 import junit.framework.TestCase;
+
 /**
  * test the Demux job in one process, using mini-mr.
* * Unfortunately, this test case needs more jars than the rest of chukwa, - * including hadoop-*-test, commons-cli, and jetty5 - * - * + * including hadoop-*-test, commons-cli, and jetty5 + * + * * */ public class TestDemux extends TestCase { java.util.Random r = new java.util.Random(); + public ChunkImpl getARandomChunk() { int ms = r.nextInt(1000); - String line = "2008-05-29 10:42:22,"+ ms + " INFO org.apache.hadoop.dfs.DataNode: Some text goes here" +r.nextInt() + "\n"; + String line = "2008-05-29 10:42:22," + ms + + " INFO org.apache.hadoop.dfs.DataNode: Some text goes here" + + r.nextInt() + "\n"; - ChunkImpl c = new ChunkImpl("HadoopLogProcessor", "test", line.length() -1L, line.getBytes(), null); + ChunkImpl c = new ChunkImpl("HadoopLogProcessor", "test", + line.length() - 1L, line.getBytes(), null); return c; } - - public void writeASinkFile(Configuration conf, FileSystem fileSys, Path dest, int chunks) throws IOException { + public void writeASinkFile(Configuration conf, FileSystem fileSys, Path dest, + int chunks) throws IOException { FSDataOutputStream out = fileSys.create(dest); Calendar calendar = Calendar.getInstance(); SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(conf, out, ChukwaArchiveKey.class, ChunkImpl.class, SequenceFile.CompressionType.NONE, null); - for(int i=0; i < chunks; ++i) { + for (int i = 0; i < chunks; ++i) { ChunkImpl chunk = getARandomChunk(); ChukwaArchiveKey archiveKey = new ChukwaArchiveKey(); // FIXME compute this once an hour @@ -76,33 +80,37 @@ seqFileWriter.close(); out.close(); } - - private void runDemux(JobConf job, Path sortInput, Path sortOutput) - throws Exception { + + private void runDemux(JobConf job, Path sortInput, Path sortOutput) + throws Exception { // Setup command-line arguments to 'sort' - String[] sortArgs = {sortInput.toString(), sortOutput.toString()}; - + String[] sortArgs = { sortInput.toString(), sortOutput.toString() }; + // Run Sort assertEquals(ToolRunner.run(job, new Demux(), sortArgs), 0); } - + int NUM_HADOOP_SLAVES = 1; int LINES = 10000; private static final Path DEMUX_INPUT_PATH = new Path("/demux/input"); private static final Path DEMUX_OUTPUT_PATH = new Path("/demux/output"); public void testDemux() { - try{ + try { System.out.println("testing demux"); Configuration conf = new Configuration(); - System.setProperty("hadoop.log.dir", System.getProperty("test.build.data", "/tmp")); - MiniDFSCluster dfs = new MiniDFSCluster(conf, NUM_HADOOP_SLAVES, true, null); + System.setProperty("hadoop.log.dir", System.getProperty( + "test.build.data", "/tmp")); + MiniDFSCluster dfs = new MiniDFSCluster(conf, NUM_HADOOP_SLAVES, true, + null); FileSystem fileSys = dfs.getFileSystem(); - MiniMRCluster mr = new MiniMRCluster(NUM_HADOOP_SLAVES, fileSys.getUri().toString(), 1); + MiniMRCluster mr = new MiniMRCluster(NUM_HADOOP_SLAVES, fileSys.getUri() + .toString(), 1); writeASinkFile(conf, fileSys, DEMUX_INPUT_PATH, LINES); - System.out.println("wrote " + - fileSys.getFileStatus(DEMUX_INPUT_PATH).getLen() + " bytes of temp test data"); + System.out.println("wrote " + + fileSys.getFileStatus(DEMUX_INPUT_PATH).getLen() + + " bytes of temp test data"); long ts_start = System.currentTimeMillis(); runDemux(mr.createJobConf(), DEMUX_INPUT_PATH, DEMUX_OUTPUT_PATH); @@ -110,13 +118,12 @@ long bytes = fileSys.getContentSummary(DEMUX_OUTPUT_PATH).getLength(); System.out.println("result was " + bytes + " bytes long"); System.out.println("processing took " + time + " milliseconds"); - System.out.println("aka " + time * 1.0 / LINES 
+ " ms per line or " + - time *1000.0 / bytes + " ms per kilobyte of log data"); - - } catch(Exception e) { + System.out.println("aka " + time * 1.0 / LINES + " ms per line or " + + time * 1000.0 / bytes + " ms per kilobyte of log data"); + + } catch (Exception e) { e.printStackTrace(); } } - - + } Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ChukwaTestOutputCollector.java URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ChukwaTestOutputCollector.java?rev=752666&r1=752665&r2=752666&view=diff ============================================================================== --- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ChukwaTestOutputCollector.java (original) +++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ChukwaTestOutputCollector.java Wed Mar 11 22:39:26 2009 @@ -1,36 +1,32 @@ package org.apache.hadoop.chukwa.extraction.demux.processor.mapper; + import java.io.IOException; import java.util.HashMap; import java.util.Iterator; - import org.apache.hadoop.mapred.OutputCollector; -public class ChukwaTestOutputCollector implements OutputCollector -{ - public HashMap data = new HashMap(); - - public void collect(K key, V value) throws IOException - { - data.put(key, value); - } - - @Override - public String toString() - { - Iterator it = data.keySet().iterator(); - K key = null; - V value = null; - StringBuilder sb = new StringBuilder(); - - while(it.hasNext()) - { - key = it.next(); - value = data.get(key); - sb.append("Key[").append(key).append("] value[").append(value).append("]\n"); - } - return sb.toString(); - } +public class ChukwaTestOutputCollector implements OutputCollector { + public HashMap data = new HashMap(); + + public void collect(K key, V value) throws IOException { + data.put(key, value); + } + + @Override + public String toString() { + Iterator it = data.keySet().iterator(); + K key = null; + V value = null; + StringBuilder sb = new StringBuilder(); + + while (it.hasNext()) { + key = it.next(); + value = data.get(key); + sb.append("Key[").append(key).append("] value[").append(value).append( + "]\n"); + } + return sb.toString(); + } - } Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestAbtractProcessor.java URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestAbtractProcessor.java?rev=752666&r1=752665&r2=752666&view=diff ============================================================================== --- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestAbtractProcessor.java (original) +++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestAbtractProcessor.java Wed Mar 11 22:39:26 2009 @@ -1,7 +1,7 @@ package org.apache.hadoop.chukwa.extraction.demux.processor.mapper; -import junit.framework.TestCase; +import junit.framework.TestCase; import org.apache.hadoop.chukwa.Chunk; import org.apache.hadoop.chukwa.ChunkBuilder; import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord; @@ -10,51 +10,47 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; -public class TestAbtractProcessor extends TestCase -{ +public class TestAbtractProcessor extends TestCase { + + String[] data = { "dsjsjbsfjds\ndsafsfasd\n", + 
"asdgHSAJGDGYDGGHAgd7364rt3478tc4\nhr473rt346t\n", "e gqd yeegyxuyexfg\n" }; - String[] data = {"dsjsjbsfjds\ndsafsfasd\n","asdgHSAJGDGYDGGHAgd7364rt3478tc4\nhr473rt346t\n","e gqd yeegyxuyexfg\n"}; - - public void testParse() - { - - - ChunkBuilder cb = new ChunkBuilder(); - cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[0]).getBytes()); - cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[1]).getBytes()); - cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[2]).getBytes()); - Chunk chunk = cb.getChunk(); - OutputCollector output = new ChukwaTestOutputCollector(); - TProcessor p = new TProcessor(); - p.data = data; - p.process(null,chunk, output, null); - } + public void testParse() { + ChunkBuilder cb = new ChunkBuilder(); + cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[0]) + .getBytes()); + cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[1]) + .getBytes()); + cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[2]) + .getBytes()); + Chunk chunk = cb.getChunk(); + OutputCollector output = new ChukwaTestOutputCollector(); + TProcessor p = new TProcessor(); + p.data = data; + p.process(null, chunk, output, null); + } } -class TProcessor extends AbstractProcessor -{ - String[] data = null; - int count = 0; - - @Override - protected void parse(String recordEntry, - OutputCollector output, Reporter reporter) - { - if (!recordEntry.equals(data[count])) - { - System.out.println("[" + recordEntry +"]"); - System.out.println("[" + data[count] +"]"); - throw new RuntimeException("not the same record"); - } - count ++; - } - - public String getDataType() - { - // TODO Auto-generated method stub - return null; - } +class TProcessor extends AbstractProcessor { + String[] data = null; + int count = 0; + + @Override + protected void parse(String recordEntry, + OutputCollector output, Reporter reporter) { + if (!recordEntry.equals(data[count])) { + System.out.println("[" + recordEntry + "]"); + System.out.println("[" + data[count] + "]"); + throw new RuntimeException("not the same record"); + } + count++; + } + + public String getDataType() { + // TODO Auto-generated method stub + return null; + } } \ No newline at end of file Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestHadoopLogProcessor.java URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestHadoopLogProcessor.java?rev=752666&r1=752665&r2=752666&view=diff ============================================================================== --- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestHadoopLogProcessor.java (original) +++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestHadoopLogProcessor.java Wed Mar 11 22:39:26 2009 @@ -18,10 +18,9 @@ package org.apache.hadoop.chukwa.extraction.demux.processor.mapper; -import java.io.IOException; +import java.io.IOException; import junit.framework.TestCase; - import org.apache.hadoop.chukwa.Chunk; import org.apache.hadoop.chukwa.ChunkImpl; import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord; @@ -34,46 +33,46 @@ * * Currently more or less just a stub */ -public class TestHadoopLogProcessor extends TestCase{ - +public class TestHadoopLogProcessor extends TestCase { + long serializedSize = 0; OutputCollector nullcollector = new OutputCollector() { - public 
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestHadoopLogProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestHadoopLogProcessor.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestHadoopLogProcessor.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestHadoopLogProcessor.java Wed Mar 11 22:39:26 2009
@@ -18,10 +18,9 @@
 package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
 
-import java.io.IOException;
+import java.io.IOException;
 import junit.framework.TestCase;
-
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
@@ -34,46 +33,46 @@
  *
  * Currently more or less just a stub
  */
-public class TestHadoopLogProcessor extends TestCase{
-
+public class TestHadoopLogProcessor extends TestCase {
+
   long serializedSize = 0;
   OutputCollector<ChukwaRecordKey, ChukwaRecord> nullcollector = new OutputCollector<ChukwaRecordKey, ChukwaRecord>() {
-    public void collect(ChukwaRecordKey arg0, ChukwaRecord arg1) throws IOException
-    {
+    public void collect(ChukwaRecordKey arg0, ChukwaRecord arg1)
+        throws IOException {
       serializedSize += arg1.toString().length();
     }
   };
-
-
   public void testHLPParseTimes() {
     HadoopLogProcessor hlp = new HadoopLogProcessor();
-
+
     int LINES = 50000;
     long bytes = 0;
     long ts_start = System.currentTimeMillis();
-    for(int i =0; i < LINES; ++i) {
+    for (int i = 0; i < LINES; ++i) {
       Chunk c = getNewChunk();
       bytes += c.getData().length;
-      hlp.process(null,c, nullcollector, Reporter.NULL);
-      // hlp.parse(line, nullcollector, Reporter.NULL);
+      hlp.process(null, c, nullcollector, Reporter.NULL);
+      // hlp.parse(line, nullcollector, Reporter.NULL);
     }
     long time = (System.currentTimeMillis() - ts_start);
     System.out.println("parse took " + time + " milliseconds");
-    System.out.println("aka " + time * 1.0 / LINES + " ms per line or " +
-        time *1000.0 / bytes + " ms per kilobyte of log data");
+    System.out.println("aka " + time * 1.0 / LINES + " ms per line or " + time
+        * 1000.0 / bytes + " ms per kilobyte of log data");
     System.out.println("output records had total length of " + serializedSize);
   }
 
   java.util.Random r = new java.util.Random();
+
   public Chunk getNewChunk() {
     int ms = r.nextInt(1000);
-    String line = "2008-05-29 10:42:22,"+ ms + " INFO org.apache.hadoop.dfs.DataNode: Some text goes here" +r.nextInt() + "\n";
-    ChunkImpl c = new ChunkImpl("HadoopLogProcessor", "test" ,line.length() -1 ,line.getBytes() , null );
-    
+    String line = "2008-05-29 10:42:22," + ms
+        + " INFO org.apache.hadoop.dfs.DataNode: Some text goes here"
+        + r.nextInt() + "\n";
+    ChunkImpl c = new ChunkImpl("HadoopLogProcessor", "test",
+        line.length() - 1, line.getBytes(), null);
+
     return c;
   }
-
-}
+}
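testHLPParseTimes() is a micro-benchmark rather than a correctness test. As a worked example of the numbers it prints (figures hypothetical): if processing LINES = 50,000 chunks takes time = 2,000 ms over bytes = 5,000,000 bytes of input, it reports 2000 * 1.0 / 50000 = 0.04 ms per line and 2000 * 1000.0 / 5000000 = 0.4 ms per kilobyte; the factor of 1000 converts the per-byte rate into a per-kilobyte one.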
series.put(""+i,1.0*i); + for (int i = 0; i < 5; i++) { + labels.add("" + i); + series.put("" + i, 1.0 * i); } dataMap.put("series1", series); c.setXLabelsRange(labels); - c.setDataSet(render,dataMap); + c.setDataSet(render, dataMap); String output = c.plot(); assertTrue(output.contains("bar")); } - public void testScatterChart() - { - HttpServletRequest request=null; + public void testScatterChart() { + HttpServletRequest request = null; Chart c = new Chart(request); - String render= "point"; + String render = "point"; TreeMap> dataMap = new TreeMap>(); TreeMap series = new TreeMap(); ArrayList labels = new ArrayList(); - for(int i=0;i<5;i++) { - labels.add(""+i); - series.put(""+i,1.0*i); + for (int i = 0; i < 5; i++) { + labels.add("" + i); + series.put("" + i, 1.0 * i); } dataMap.put("series1", series); c.setXLabelsRange(labels); - c.setDataSet(render,dataMap); + c.setDataSet(render, dataMap); String output = c.plot(); assertTrue(output.contains("point")); } Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/inputtools/TestInputFormat.java URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/inputtools/TestInputFormat.java?rev=752666&r1=752665&r2=752666&view=diff ============================================================================== --- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/inputtools/TestInputFormat.java (original) +++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/inputtools/TestInputFormat.java Wed Mar 11 22:39:26 2009 @@ -18,12 +18,10 @@ package org.apache.hadoop.chukwa.inputtools; -import java.io.IOException; +import java.io.IOException; import org.apache.hadoop.mapred.Reporter; - import junit.framework.TestCase; - import org.apache.hadoop.chukwa.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapred.*; @@ -32,61 +30,55 @@ import org.apache.hadoop.io.*; public class TestInputFormat extends TestCase { - - String[] lines = { - "the rain", - "in spain", - "falls mainly", - "in the plain" - }; - + + String[] lines = { "the rain", "in spain", "falls mainly", "in the plain" }; + public void testInputFormat() { - + try { - JobConf conf = new JobConf(); - String TMP_DIR = System.getProperty("test.build.data", "/tmp"); - Path filename = new Path("file:///"+TMP_DIR+"/tmpSeqFile"); - SequenceFile.Writer sfw = SequenceFile.createWriter(FileSystem.getLocal(conf), - conf, filename, ChukwaArchiveKey.class, ChunkImpl.class, - SequenceFile.CompressionType.NONE, Reporter.NULL); - - - StringBuilder buf = new StringBuilder(); - int offsets[] = new int[lines.length]; - for(int i= 0; i < lines.length; ++i) { - buf.append(lines[i]); - buf.append("\n"); - offsets[i] = buf.length()-1; - } - ChukwaArchiveKey key = new ChukwaArchiveKey(0, "datatype", "sname", 0); - ChunkImpl val = new ChunkImpl("datatype", "sname", 0, buf.toString().getBytes(), null); - val.setRecordOffsets(offsets); - sfw.append(key, val); - sfw.append(key, val); //write it twice - sfw.close(); - - - long len = FileSystem.getLocal(conf).getFileStatus(filename).getLen(); - InputSplit split = new FileSplit(filename, 0, len, (String[] ) null); - ChukwaInputFormat in = new ChukwaInputFormat(); - RecordReader r= in.getRecordReader(split, conf, Reporter.NULL); - - - LongWritable l = r.createKey(); - Text line = r.createValue(); - for(int i =0 ; i < lines.length * 2; ++i) { + JobConf conf = new JobConf(); + String TMP_DIR = System.getProperty("test.build.data", "/tmp"); + Path filename = new Path("file:///" + TMP_DIR + "/tmpSeqFile"); + 
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/inputtools/TestInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/inputtools/TestInputFormat.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/inputtools/TestInputFormat.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/inputtools/TestInputFormat.java Wed Mar 11 22:39:26 2009
@@ -18,12 +18,10 @@
 package org.apache.hadoop.chukwa.inputtools;
 
-import java.io.IOException;
+import java.io.IOException;
 import org.apache.hadoop.mapred.Reporter;
-
 import junit.framework.TestCase;
-
 import org.apache.hadoop.chukwa.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.*;
@@ -32,61 +30,55 @@
 import org.apache.hadoop.io.*;
 
 public class TestInputFormat extends TestCase {
-
-  String[] lines = {
-      "the rain",
-      "in spain",
-      "falls mainly",
-      "in the plain"
-  };
-
+
+  String[] lines = { "the rain", "in spain", "falls mainly", "in the plain" };
+
   public void testInputFormat() {
-
+
     try {
-      JobConf conf = new JobConf();
-      String TMP_DIR = System.getProperty("test.build.data", "/tmp");
-      Path filename = new Path("file:///"+TMP_DIR+"/tmpSeqFile");
-      SequenceFile.Writer sfw = SequenceFile.createWriter(FileSystem.getLocal(conf),
-          conf, filename, ChukwaArchiveKey.class, ChunkImpl.class,
-          SequenceFile.CompressionType.NONE, Reporter.NULL);
-
-
-      StringBuilder buf = new StringBuilder();
-      int offsets[] = new int[lines.length];
-      for(int i= 0; i < lines.length; ++i) {
-        buf.append(lines[i]);
-        buf.append("\n");
-        offsets[i] = buf.length()-1;
-      }
-      ChukwaArchiveKey key = new ChukwaArchiveKey(0, "datatype", "sname", 0);
-      ChunkImpl val = new ChunkImpl("datatype", "sname", 0, buf.toString().getBytes(), null);
-      val.setRecordOffsets(offsets);
-      sfw.append(key, val);
-      sfw.append(key, val); //write it twice
-      sfw.close();
-
-
-      long len = FileSystem.getLocal(conf).getFileStatus(filename).getLen();
-      InputSplit split = new FileSplit(filename, 0, len, (String[] ) null);
-      ChukwaInputFormat in = new ChukwaInputFormat();
-      RecordReader<LongWritable, Text> r= in.getRecordReader(split, conf, Reporter.NULL);
-
-
-      LongWritable l = r.createKey();
-      Text line = r.createValue();
-      for(int i =0 ; i < lines.length * 2; ++i) {
+      JobConf conf = new JobConf();
+      String TMP_DIR = System.getProperty("test.build.data", "/tmp");
+      Path filename = new Path("file:///" + TMP_DIR + "/tmpSeqFile");
+      SequenceFile.Writer sfw = SequenceFile.createWriter(FileSystem
+          .getLocal(conf), conf, filename, ChukwaArchiveKey.class,
+          ChunkImpl.class, SequenceFile.CompressionType.NONE, Reporter.NULL);
+
+      StringBuilder buf = new StringBuilder();
+      int offsets[] = new int[lines.length];
+      for (int i = 0; i < lines.length; ++i) {
+        buf.append(lines[i]);
+        buf.append("\n");
+        offsets[i] = buf.length() - 1;
+      }
+      ChukwaArchiveKey key = new ChukwaArchiveKey(0, "datatype", "sname", 0);
+      ChunkImpl val = new ChunkImpl("datatype", "sname", 0, buf.toString()
+          .getBytes(), null);
+      val.setRecordOffsets(offsets);
+      sfw.append(key, val);
+      sfw.append(key, val); // write it twice
+      sfw.close();
+
+      long len = FileSystem.getLocal(conf).getFileStatus(filename).getLen();
+      InputSplit split = new FileSplit(filename, 0, len, (String[]) null);
+      ChukwaInputFormat in = new ChukwaInputFormat();
+      RecordReader<LongWritable, Text> r = in.getRecordReader(split, conf,
+          Reporter.NULL);
+
+      LongWritable l = r.createKey();
+      Text line = r.createValue();
+      for (int i = 0; i < lines.length * 2; ++i) {
+        boolean succeeded = r.next(l, line);
+        assertTrue(succeeded);
+        assertEquals(i, l.get());
+        assertEquals(lines[i % lines.length], line.toString());
+        System.out.println("read line: " + l.get() + " " + line);
+      }
       boolean succeeded = r.next(l, line);
-      assertTrue(succeeded);
-      assertEquals(i, l.get());
-      assertEquals(lines[i % lines.length] , line.toString());
-      System.out.println("read line: "+ l.get() + " "+ line);
-      }
-      boolean succeeded = r.next(l, line);
-      assertFalse(succeeded);
-      
-    } catch(IOException e) {
+      assertFalse(succeeded);
+
+    } catch (IOException e) {
       e.printStackTrace();
-      fail("IO exception "+ e);
+      fail("IO exception " + e);
    }
  }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/util/TestRecordConsts.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/util/TestRecordConsts.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/util/TestRecordConsts.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/util/TestRecordConsts.java Wed Mar 11 22:39:26 2009
@@ -17,24 +17,25 @@
  */
 package org.apache.hadoop.chukwa.util;
+
 import junit.framework.TestCase;
 
 public class TestRecordConsts extends TestCase {
 
-  public void testEscapeAllButLastRecordSeparator()
-  {
-    String post = RecordConstants.escapeAllButLastRecordSeparator("\n", "foo bar baz\n");
+  public void testEscapeAllButLastRecordSeparator() {
+    String post = RecordConstants.escapeAllButLastRecordSeparator("\n",
+        "foo bar baz\n");
     assertEquals(post, "foo bar baz\n");
-    
-    post = RecordConstants.escapeAllButLastRecordSeparator("\n", "foo\nbar\nbaz\n");
+
+    post = RecordConstants.escapeAllButLastRecordSeparator("\n",
+        "foo\nbar\nbaz\n");
     post = post.replaceAll(RecordConstants.RECORD_SEPARATOR_ESCAPE_SEQ, "^D");
     assertEquals(post, "foo^D\nbar^D\nbaz\n");
-    System.out.println("string is " + post+".");
+    System.out.println("string is " + post + ".");
   }
 
-  public void testEscapeAllRecordSeparators()
-  {
+  public void testEscapeAllRecordSeparators() {
   }
 }
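One detail worth reading off the expected string in testEscapeAllButLastRecordSeparator(): escaping does not remove the embedded separators. After the escape sequence is substituted with "^D" for display, "foo\nbar\nbaz\n" reads "foo^D\nbar^D\nbaz\n", i.e. the escape sequence is inserted ahead of every separator except the final one. This is inferred here from the assertions above rather than from the RecordConstants source:

    String post = RecordConstants.escapeAllButLastRecordSeparator("\n",
        "foo\nbar\nbaz\n");
    // post == "foo" + ESC + "\n" + "bar" + ESC + "\n" + "baz\n",
    // where ESC is RecordConstants.RECORD_SEPARATOR_ESCAPE_SEQ.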