hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From omal...@apache.org
Subject svn commit: r685353 [13/13] - in /hadoop/core/trunk: ./ src/contrib/chukwa/ src/contrib/chukwa/bin/ src/contrib/chukwa/build/ src/contrib/chukwa/conf/ src/contrib/chukwa/dist/ src/contrib/chukwa/docs/ src/contrib/chukwa/docs/paper/ src/contrib/chukwa/h...
Date Tue, 12 Aug 2008 22:35:23 GMT
Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java
(added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/agent/TestAgent.java
Tue Aug 12 15:35:16 2008
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.datacollection.agent;
+
+import java.util.ArrayList;
+
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
+import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
+import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
+
+import junit.framework.TestCase;
+
+public class TestAgent extends TestCase {
+
+
+  public void testStopAndStart() {
+
+    try {
+      ChukwaAgent agent = new ChukwaAgent();
+      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
+      conn.start();
+      
+      ChukwaConfiguration cc = new ChukwaConfiguration();
+      int portno = cc.getInt("chukwaAgent.control.port", 9093);
+      ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
+      
+      for(int i=1; i < 20; ++i) {
+        cli.add("org.apache.hadoop.chukwa.util.ConstRateAdaptor", "raw" + i, "20000", 0);
+        assertTrue(agent.adaptorCount() == 1);
+        Thread.sleep(2000);   
+        cli.removeAll();
+        assertTrue(agent.adaptorCount() == 0);
+      }
+      agent.shutdown();
+      conn.shutdown();
+    } catch(Exception e) {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+  }
+  
+  public void testMultiStopAndStart() {
+
+    try {
+      ChukwaAgent agent = new ChukwaAgent();
+      ConsoleOutConnector conn = new ConsoleOutConnector(agent, true);
+      conn.start();
+      
+      for(int trial=0; trial < 20; ++trial) {
+        ArrayList<Long> runningAdaptors = new ArrayList<Long>();
+       
+        for(int i = 1; i < 7; ++i) {
+          long l = agent.processCommand("add org.apache.hadoop.chukwa.util.ConstRateAdaptor
raw"+i+ " 20000 0");
+          assertTrue(agent.adaptorCount() == i); 
+          assertTrue(l != -1);
+          runningAdaptors.add(l);
+        }
+        Thread.sleep(1000);   
+        for(Long l: runningAdaptors)
+          agent.stopAdaptor(l, true);
+        assertTrue(agent.adaptorCount() == 0);
+      }
+      agent.shutdown();
+    } catch(Exception e) {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/connector/ChunkCatcherConnector.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/connector/ChunkCatcherConnector.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/connector/ChunkCatcherConnector.java
(added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/connector/ChunkCatcherConnector.java
Tue Aug 12 15:35:16 2008
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.datacollection.connector;
+
+import org.apache.hadoop.chukwa.Chunk;
+import org.apache.hadoop.chukwa.datacollection.*;
+import java.util.ArrayList;
+
+public class ChunkCatcherConnector implements Connector {
+
+  ChunkQueue eq;
+  public void start(){
+    eq = DataFactory.getInstance().getEventQueue();
+  }
+  
+  public Chunk waitForAChunk() throws InterruptedException {
+    ArrayList<Chunk> chunks = new ArrayList<Chunk>();
+    eq.collect(chunks, 1);
+    return chunks.get(0);
+  }
+
+  public void shutdown() {
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/connector/TestFailedCollector.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/connector/TestFailedCollector.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/connector/TestFailedCollector.java
(added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/connector/TestFailedCollector.java
Tue Aug 12 15:35:16 2008
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.datacollection.connector;
+
+import java.io.File;
+
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.chukwa.datacollection.TempFileUtil;
+import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
+import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
+import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
+
+import junit.framework.TestCase;
+
+public class TestFailedCollector extends TestCase {
+  
+  public void testFailedCollector()
+  {
+    try {
+      ChukwaAgent agent = new ChukwaAgent();
+      boolean failed=false;
+      HttpConnector connector = new HttpConnector(agent, "http://localhost:1234/chukwa");
+      connector.start();
+      
+      ChukwaConfiguration cc = new ChukwaConfiguration();
+      int portno = cc.getInt("chukwaAgent.control.port", 9093);
+      ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
+      
+      File tmpOutput = TempFileUtil.makeBinary(2000);
+      
+      cli.addFile("unknown", tmpOutput.getAbsolutePath());
+      System.out.println("have " + agent.adaptorCount() + " running adaptors");
+      cli.removeFile("unknown", tmpOutput.getAbsolutePath());
+    
+      
+      tmpOutput.delete();
+      assertFalse(failed);
+      System.out.println("done");
+      agent.shutdown();
+      connector.shutdown();
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java
(added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/controller/TestAgentClient.java
Tue Aug 12 15:35:16 2008
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.datacollection.controller;
+
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
+import org.apache.hadoop.chukwa.datacollection.connector.Connector;
+import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
+import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
+
+import java.io.IOException;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+public class TestAgentClient extends TestCase {
+  ChukwaConfiguration config;
+  ChukwaAgent agent;
+  ChukwaAgentController c;
+  Connector httpConnector;
+  //consoleConnector = new ConsoleOutConnector(agent);
+  
+  protected void setUp() throws ChukwaAgent.AlreadyRunningException {
+    config = new ChukwaConfiguration();
+    agent = new ChukwaAgent();
+    c = new ChukwaAgentController();
+    httpConnector = new HttpConnector(agent); //use default source for list of collectors
(i.e. conf/connectors)
+
+    httpConnector.start();
+
+//    assertTrue(Integer.parseInt(config.get("chukwaAgent.control.port")) == agent.getControlSock().getPortNumber());
+  }
+  
+  protected void tearDown(){
+    System.out.println("in tearDown()");
+    ((HttpConnector)httpConnector).shutdown();
+  }
+  
+  public void testAddFile() {
+    String appType = "junit_addFileTest";
+    String params = "testFile";
+    try{
+      //add the fileTailer to the agent using the client
+      System.out.println("Adding adaptor with filename: " + params);
+      long adaptorID = c.addFile(appType, params);
+      System.out.println("Successfully added adaptor, id is:" + adaptorID);
+      
+      //do a list on the agent to see if the adaptor has been added for this file
+      Map<Long, ChukwaAgentController.Adaptor> listResult = c.list();
+      assertTrue(listResult.containsKey(adaptorID));
+    }
+    catch(IOException e)
+    {
+      e.printStackTrace();
+    }
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java
(added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/datacollection/sender/TestRetryListOfCollectors.java
Tue Aug 12 15:35:16 2008
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.datacollection.sender;
+
+import junit.framework.TestCase;
+import java.util.*;
+
+import org.apache.hadoop.chukwa.datacollection.sender.RetryListOfCollectors;
+
+public class TestRetryListOfCollectors extends TestCase {
+
+  public void testRetryList()
+  {
+    List<String> hosts = new ArrayList<String>();
+    hosts.add("host1");
+    hosts.add("host2");
+    hosts.add("host3");
+    hosts.add("host4");
+    RetryListOfCollectors rloc = new RetryListOfCollectors(hosts, 2000);
+    assertEquals(hosts.size(), rloc.total());
+    
+    for(int i = 0; i < hosts.size(); ++i) {
+      assertTrue(rloc.hasNext());
+      String s =  rloc.next();
+      assertTrue(s != null);
+      System.out.println(s);
+    }
+    
+    if(rloc.hasNext()) {
+      String s = rloc.next();
+      System.out.println("saw unexpected collector " + s);
+      fail();
+    }
+  
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/extraction/demux/TestDemux.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/extraction/demux/TestDemux.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/extraction/demux/TestDemux.java
(added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/extraction/demux/TestDemux.java
Tue Aug 12 15:35:16 2008
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.extraction.demux;
+
+import java.io.IOException;
+import java.util.Calendar;
+
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.chukwa.ChukwaArchiveKey;
+import org.apache.hadoop.chukwa.ChunkImpl;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+
+import junit.framework.TestCase;
+/**
+ * test the Demux job in one process, using mini-mr.
+ * 
+ * Unfortunately, this test case needs more jars than the rest of chukwa,
+ *  including hadoop-*-test, commons-cli, and jetty5
+ *  
+ *  
+ * 
+ */
+public class TestDemux extends TestCase {
+
+  java.util.Random r = new java.util.Random();
+  public ChunkImpl getARandomChunk() {
+    int ms = r.nextInt(1000);
+    String line = "2008-05-29 10:42:22,"+ ms + " INFO org.apache.hadoop.dfs.DataNode: Some
text goes here" +r.nextInt() + "\n";
+
+    ChunkImpl c = new ChunkImpl("HadoopLogProcessor", "test", line.length() -1L, line.getBytes(),
null);
+    return c;
+  }
+  
+
+  public void writeASinkFile(Configuration conf, FileSystem fileSys, Path dest, int chunks)
throws IOException {
+    FSDataOutputStream out = fileSys.create(dest);
+
+    Calendar calendar = Calendar.getInstance();
+    SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(conf, out,
+        ChukwaArchiveKey.class, ChunkImpl.class,
+        SequenceFile.CompressionType.NONE, null);
+    for(int i=0; i < chunks; ++i) {
+      ChunkImpl chunk = getARandomChunk();
+      ChukwaArchiveKey archiveKey = new ChukwaArchiveKey();
+      // FIXME compute this once an hour
+      calendar.setTimeInMillis(System.currentTimeMillis());
+      calendar.set(Calendar.MINUTE, 0);
+      calendar.set(Calendar.SECOND, 0);
+      calendar.set(Calendar.MILLISECOND, 0);
+      archiveKey.setTimePartition(calendar.getTimeInMillis());
+      archiveKey.setDataType(chunk.getDataType());
+      archiveKey.setStreamName(chunk.getStreamName());
+      archiveKey.setSeqId(chunk.getSeqID());
+      seqFileWriter.append(archiveKey, chunk);
+    }
+    seqFileWriter.close();
+    out.close();
+  }
+  
+  private void runDemux(JobConf job, Path sortInput, Path sortOutput) 
+  throws Exception {
+    // Setup command-line arguments to 'sort'
+    String[] sortArgs = {sortInput.toString(), sortOutput.toString()};
+    
+    // Run Sort
+    assertEquals(ToolRunner.run(job, new Demux(), sortArgs), 0);
+  }
+  
+  int NUM_HADOOP_SLAVES = 1;
+  int LINES = 10000;
+  private static final Path DEMUX_INPUT_PATH = new Path("/demux/input");
+  private static final Path DEMUX_OUTPUT_PATH = new Path("/demux/output");
+
+  public void testDemux() {
+    try{
+      System.out.println("testing demux");
+      Configuration conf = new Configuration();
+      System.setProperty("hadoop.log.dir", "/tmp/");
+      MiniDFSCluster dfs = new MiniDFSCluster(conf, NUM_HADOOP_SLAVES, true, null);
+      FileSystem fileSys = dfs.getFileSystem();
+      MiniMRCluster mr = new MiniMRCluster(NUM_HADOOP_SLAVES, fileSys.getUri().toString(),
1);
+      writeASinkFile(conf, fileSys, DEMUX_INPUT_PATH, LINES);
+
+      System.out.println("wrote " + 
+      fileSys.getFileStatus(DEMUX_INPUT_PATH).getLen() + " bytes of temp test data");
+      long ts_start = System.currentTimeMillis();
+      runDemux(mr.createJobConf(), DEMUX_INPUT_PATH, DEMUX_OUTPUT_PATH);
+
+      long time = (System.currentTimeMillis() - ts_start);
+      long bytes = fileSys.getContentSummary(DEMUX_OUTPUT_PATH).getLength();
+      System.out.println("result was " + bytes + " bytes long");
+      System.out.println("processing took " + time + " milliseconds");
+      System.out.println("aka " + time * 1.0 / LINES + " ms per line or " + 
+          time *1000.0 / bytes  + " ms per kilobyte of log data");
+      
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+  }
+  
+  
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/inputtools/log4j/TestChukwaAppender.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/inputtools/log4j/TestChukwaAppender.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/inputtools/log4j/TestChukwaAppender.java
(added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/inputtools/log4j/TestChukwaAppender.java
Tue Aug 12 15:35:16 2008
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.inputtools.log4j;
+
+import org.apache.hadoop.chukwa.Chunk;
+import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
+import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
+
+import junit.framework.TestCase;
+import java.io.*;
+import org.apache.log4j.*;
+import org.apache.log4j.spi.*;
+
+public class TestChukwaAppender extends TestCase {
+  
+  public void testChukwaAppender() {
+    try {
+    
+    ChukwaAgent agent = new ChukwaAgent();
+    ChunkCatcherConnector chunks = new ChunkCatcherConnector();
+    chunks.start();
+    Logger myLogger = Logger.getLogger(TestChukwaAppender.class);
+    File out = new File("/tmp/chukwa_test_out");
+    out.delete();
+    ChukwaDailyRollingFileAppender app = new ChukwaDailyRollingFileAppender(
+        new SimpleLayout(), out.getAbsolutePath(), "yyyy-MM-dd");
+    app.append(new LoggingEvent("foo", myLogger,  System.currentTimeMillis(),Priority.INFO,
"foo", null));
+    assertEquals(1, agent.adaptorCount());
+    Chunk c = chunks.waitForAChunk();
+    System.out.println("read a chunk OK");
+    String logLine = new String(c.getData());
+    assertTrue(logLine.equals("INFO - foo\n"));
+    System.out.println(new String(c.getData()));
+    //
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/util/TestRecordConsts.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/util/TestRecordConsts.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/util/TestRecordConsts.java
(added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/test/org/apache/hadoop/chukwa/util/TestRecordConsts.java
Tue Aug 12 15:35:16 2008
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.util;
+
+import junit.framework.TestCase;
+
+public class TestRecordConsts extends TestCase {
+
+  public void testEscapeAllButLastRecordSeparator()
+  {
+    String post = RecordConstants.escapeAllButLastRecordSeparator("\n", "foo bar baz\n");
+    assertEquals(post, "foo bar baz\n");
+    
+    post = RecordConstants.escapeAllButLastRecordSeparator("\n", "foo\nbar\nbaz\n");
+    post = post.replaceAll(RecordConstants.RECORD_SEPARATOR_ESCAPE_SEQ, "^D");
+    assertEquals(post, "foo^D\nbar^D\nbaz\n");
+
+    System.out.println("string is " + post+".");
+  }
+
+  public void testEscapeAllRecordSeparators()
+  {
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/web/collector/WEB-INF/web.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/web/collector/WEB-INF/web.xml?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/web/collector/WEB-INF/web.xml (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/web/collector/WEB-INF/web.xml Tue Aug 12 15:35:16
2008
@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="ISO-8859-1"?>

<web-app xmlns="http://java.sun.com/xml/ns/javaee"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
   version="2.5">

    <description>
      Chukwa Collector.
    </description>
    <display-name>Chukwa Collector.</display-name>


    <servlet>
      <servlet-name>chukwaCollector</servlet-name>
      <servlet-class>org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector</servlet-class>
          <init-param>
            <param-name>chukwaCollector.outputDir</param-name>
            <!-- NOTE(review): hard-coded user path; should be made configurable -->
            <param-value>hdfs://localhost:9000/user/jboulon/chukwa/logs</param-value>
          </init-param>
          <init-param>
            <!-- rotation interval in milliseconds (5 minutes) -->
            <param-name>chukwaCollector.rotateInterval</param-name>
            <param-value>300000</param-value>
          </init-param>
          <init-param>
            <param-name>chukwaCollector.useHDFS</param-name>
            <!-- was the malformed value "True/" -->
            <param-value>true</param-value>
          </init-param>
    </servlet>

    <servlet-mapping>
        <servlet-name>chukwaCollector</servlet-name>
        <url-pattern>/collector</url-pattern>
    </servlet-mapping>

</web-app>

Added: hadoop/core/trunk/src/contrib/chukwa/tools/expire.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/tools/expire.sh?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/tools/expire.sh (added)
+++ hadoop/core/trunk/src/contrib/chukwa/tools/expire.sh Tue Aug 12 15:35:16 2008
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Deletes files under $2 whose last access is more than $1 days ago.
# $3 = "nowait" skips the interactive confirmation.
if [ $# -lt 2 ]; then
  echo "Usage: $0 <days> <directory> <nowait>"
  echo "       <days>      - numeric number indicates days prior to expiration"
  echo "       <directory> - target directory"
  echo "       <nowait>    - set to \"nowait\" to delete files without confirmation"
  exit 0;
fi

# Use '=' (POSIX) rather than '==', which is a bashism under #!/bin/sh;
# quote $1/$2 so paths with spaces work; -I{} replaces the deprecated
# GNU-only 'xargs -i'.
if [ "X$3" = "Xnowait" ]; then
  find "$2" -atime +"$1" | xargs -I{} rm {}
else
  echo -n "Delete files in $2, last access more than $1 day(s) ago? (y/n) "
  read q
  if [ "X$q" = "Xy" ]; then
    find "$2" -atime +"$1" | xargs -I{} rm {}
  fi
fi

Propchange: hadoop/core/trunk/src/contrib/chukwa/tools/expire.sh
------------------------------------------------------------------------------
    svn:executable = *



Mime
View raw message