chukwa-dev mailing list archives

From: eyang@apache.org
Subject: [14/22] chukwa git commit: CHUKWA-762. Fixed stale test cases. (Eric Yang)
Date: Thu, 25 Jun 2015 20:48:45 GMT
CHUKWA-762. Fixed stale test cases.  (Eric Yang)


Project: http://git-wip-us.apache.org/repos/asf/chukwa/repo
Commit: http://git-wip-us.apache.org/repos/asf/chukwa/commit/7770e225
Tree: http://git-wip-us.apache.org/repos/asf/chukwa/tree/7770e225
Diff: http://git-wip-us.apache.org/repos/asf/chukwa/diff/7770e225

Branch: refs/heads/master
Commit: 7770e2257275b150f7a42afd939d552410a4ff67
Parents: df343af
Author: Eric Yang <eyang@apache.org>
Authored: Mon Jun 22 10:54:27 2015 -0700
Committer: Eric Yang <eyang@apache.org>
Committed: Mon Jun 22 10:55:40 2015 -0700

----------------------------------------------------------------------
 CHANGES.txt                                     |   2 +
 .../datacollection/writer/TestSocketTee.java    |   3 +-
 .../chukwa/dataloader/TestSocketDataLoader.java |   3 +-
 .../demux/processor/mapper/TestJobLogEntry.java | 120 -------------------
 4 files changed, 4 insertions(+), 124 deletions(-)
----------------------------------------------------------------------
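The functional change in both surviving test files is identical: each test previously constructed a PipelineStageWriter and then called init(conf) on it; the writer is now initialized directly through its constructor. A minimal before/after sketch of just that call site (the conf variable and its exact type come from the surrounding test code and are assumed here, not shown in this diff):

    // Before: two-step construction followed by an explicit init call.
    // PipelineStageWriter psw = new PipelineStageWriter();
    // psw.init(conf);

    // After: the configuration is passed to the constructor, which
    // performs the initialization itself.
    PipelineStageWriter psw = new PipelineStageWriter(conf);

The third file, TestJobLogEntry, is removed outright rather than updated, consistent with the commit summary that the test had gone stale.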


http://git-wip-us.apache.org/repos/asf/chukwa/blob/7770e225/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 90c77c0..ce31c88 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -44,6 +44,8 @@ Trunk (unreleased changes)
 
   BUGS
 
+    CHUKWA-762. Fixed stale test cases.  (Eric Yang)
+
     CHUKWA-761.  Removed confspell from Chukwa.  (Eric Yang)
 
     CHUKWA-758. Updated default dashboard to include system metrics.  (Eric Yang)

http://git-wip-us.apache.org/repos/asf/chukwa/blob/7770e225/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestSocketTee.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestSocketTee.java b/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestSocketTee.java
index c14b4eb..e97138d 100644
--- a/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestSocketTee.java
+++ b/src/test/java/org/apache/hadoop/chukwa/datacollection/writer/TestSocketTee.java
@@ -39,8 +39,7 @@ public class TestSocketTee  extends TestCase{
     conf.set("chukwa.writerClass", 
         PipelineStageWriter.class.getCanonicalName());
     
-    PipelineStageWriter psw = new PipelineStageWriter();
-    psw.init(conf);
+    PipelineStageWriter psw = new PipelineStageWriter(conf);
 
     System.out.println("pipeline established; now pushing a chunk");
     ArrayList<Chunk> l = new ArrayList<Chunk>();

http://git-wip-us.apache.org/repos/asf/chukwa/blob/7770e225/src/test/java/org/apache/hadoop/chukwa/dataloader/TestSocketDataLoader.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/dataloader/TestSocketDataLoader.java b/src/test/java/org/apache/hadoop/chukwa/dataloader/TestSocketDataLoader.java
index d3394a8..7132831 100644
--- a/src/test/java/org/apache/hadoop/chukwa/dataloader/TestSocketDataLoader.java
+++ b/src/test/java/org/apache/hadoop/chukwa/dataloader/TestSocketDataLoader.java
@@ -45,8 +45,7 @@ public class TestSocketDataLoader  extends TestCase{
     conf.set("chukwa.writerClass", 
         PipelineStageWriter.class.getCanonicalName());
     
-    PipelineStageWriter psw = new PipelineStageWriter();
-    psw.init(conf);
+    PipelineStageWriter psw = new PipelineStageWriter(conf);
 
     SocketDataLoader sdl = new SocketDataLoader("all");
     

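For context, a sketch of how the updated TestSocketDataLoader wires these pieces together, reassembled from the lines visible in this hunk; the imports, the Configuration type, and the class and method names of the scaffolding are assumptions, not taken from the diff:

    import junit.framework.TestCase;

    import org.apache.hadoop.chukwa.datacollection.writer.PipelineStageWriter;
    import org.apache.hadoop.chukwa.dataloader.SocketDataLoader;
    import org.apache.hadoop.conf.Configuration; // assumed configuration type

    public class SocketDataLoaderSketch extends TestCase {
      public void testSetup() throws Exception {
        Configuration conf = new Configuration();
        // Route chunks through the pipeline writer, as the test does.
        conf.set("chukwa.writerClass",
            PipelineStageWriter.class.getCanonicalName());
        // Constructor-based initialization, as changed by this commit.
        PipelineStageWriter psw = new PipelineStageWriter(conf);
        // Subscribe to all data streams ("all"), as in the test.
        SocketDataLoader sdl = new SocketDataLoader("all");
      }
    }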
http://git-wip-us.apache.org/repos/asf/chukwa/blob/7770e225/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJobLogEntry.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJobLogEntry.java b/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJobLogEntry.java
deleted file mode 100644
index 0dccb9d..0000000
--- a/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJobLogEntry.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
-
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.chukwa.extraction.demux.processor.mapper.JobLog.JobLogLine;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reporter;
-
-import junit.framework.TestCase;
-
-public class TestJobLogEntry extends TestCase {
-	private ArrayList<String> testLogList = new ArrayList<String>();
-
-	protected void setUp() throws Exception {
-		super.setUp();
-		InputStream stream = this.getClass().getResourceAsStream("/TestJobLog.txt");
-		BufferedReader br = new BufferedReader(new InputStreamReader(stream));
-		while(true) {
-			String line = br.readLine();
-			if(line == null)
-				break;
-			testLogList.add(line);
-		}
-		
-		stream = this.getClass().getResourceAsStream("/Hadoop18JobHistoryLog.txt");
-		br = new BufferedReader(new InputStreamReader(stream));
-		while(true) {
-			String line = br.readLine();
-			if(line == null)
-				break;
-			testLogList.add(line);
-		}
-	}
-
-	public void testJobLogEntry() {
-    JobLog jobLog = new JobLog();
-		JobLogLine log = jobLog.getJobLogLine(testLogList.get(1));
-		assertEquals("JobData", log.getLogType());
-		assertEquals("hdfs://test33/tmp/hadoop-gmon/mapred/system/job_200903062215_0577/job\\.xml", log.get("JOBCONF"));
-		assertEquals("job_200903062215_0577", log.get("JOBID"));
-		assertEquals("grep-search", log.get("JOBNAME"));
-		assertEquals("gmon", log.get("USER"));
-		assertEquals("1236386525570", log.get("SUBMIT_TIME"));
-		assertEquals(1236386525570l, log.getTimestamp());
-		
-		log = jobLog.getJobLogLine(testLogList.get(2));
-		assertEquals(1236386525570l, log.getTimestamp());
-		
-		log = jobLog.getJobLogLine(testLogList.get(4));
-		assertEquals("TaskData", log.getLogType());
-		assertEquals("", log.get("SPLITS"));
-		assertEquals(1236386529449l, log.getTimestamp());
-		
-		log = jobLog.getJobLogLine(testLogList.get(72));
-		assertEquals("TaskData", log.getLogType());
-		assertEquals("{(org\\.apache\\.hadoop\\.mapred\\.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(COMBINE_INPUT_RECORDS)(Combine input records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}", log.get("COUNTERS"));
-		
-		log = jobLog.getJobLogLine(testLogList.get(73));
-		HashMap<String, Long> counters = log.getCounterHash().flat();
-		assertEquals("1", counters.get("Counter:org.apache.hadoop.mapred.JobInProgress$Counter:TOTAL_LAUNCHED_REDUCES").toString());
-		assertEquals("20471", counters.get("Counter:FileSystemCounters:HDFS_BYTES_READ").toString());
-		
-		log = jobLog.getJobLogLine(testLogList.get(90));
-		assertTrue("START_TIME should not exist", log.get("START_TIME")==null);
-
-		log = jobLog.getJobLogLine("");
-		assertTrue(log==null);
-		
-		log = jobLog.getJobLogLine("Job JOBID=\"job_200903042324_8630\" FINISH_TIME=\"1236527538594\" JOB_STATUS=\"SUCCESS\" FINISHED_MAPS=\"10\" FINISHED_REDUCES=\"8\" FAILED_MAPS=\"0\" FAILED_REDUCES=\"0\" COUNTERS=\"input records:0,Map-Reduce Framework.Reduce input records:57038\"");
-		
-		// print all key-values
-		for(String line : testLogList) {
-			log = jobLog.getJobLogLine(line);
-			if(log == null) {
-			  continue;
-			}
-			System.out.println(log.getLogType());
-			for(Entry<String, String> entry : log.entrySet()) {
-				String k = entry.getKey();
-				String v = entry.getValue();
-				System.out.println(k + ": " + v);
-				if(k.equals("START_TIME") || k.equals("FINISH_TIME"))
-					assertTrue(v!=null && !v.equals("0"));
-			}
-			
-			// list all counters for this entry
-			for(Entry<String, Long> entry : log.getCounterHash().flat().entrySet()) {
-				System.out.println(entry.getKey() + ": " + entry.getValue());
-			}
-			
-			System.out.println();
-		}
-	}
-	
-}

