hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From heyongqi...@apache.org
Subject svn commit: r1179493 - in /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql: MapRedStats.java exec/HadoopJobExecHelper.java
Date Thu, 06 Oct 2011 03:23:24 GMT
Author: heyongqiang
Date: Thu Oct  6 03:23:23 2011
New Revision: 1179493

URL: http://svn.apache.org/viewvc?rev=1179493&view=rev
Log:
HIVE-2479: Log more Hadoop task counter values in the MapRedStats class (Kevin Wilfong via
He Yongqiang)

Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java?rev=1179493&r1=1179492&r2=1179493&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java Thu Oct  6 03:23:23 2011
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hive.ql;
 
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.Counters.Counter;
+
 /**
  * MapRedStats.
  *
@@ -30,13 +33,7 @@ public class MapRedStats {
   int numMap;
   int numReduce;
   long cpuMSec;
-  long hdfsRead = -1;
-  long hdfsWrite = -1;
-  long mapInputRecords = -1;
-  long mapOutputRecords = -1;
-  long reduceInputRecords = -1;
-  long reduceOutputRecords = -1;
-  long reduceShuffleBytes = -1;
+  Counters counters = null;
   boolean success;
 
   String jobId;
@@ -73,60 +70,12 @@ public class MapRedStats {
     this.numReduce = numReduce;
   }
 
-  public long getHdfsRead() {
-    return hdfsRead;
-  }
-
-  public void setHdfsRead(long hdfsRead) {
-    this.hdfsRead = hdfsRead;
-  }
-
-  public long getHdfsWrite() {
-    return hdfsWrite;
-  }
-
-  public void setHdfsWrite(long hdfsWrite) {
-    this.hdfsWrite = hdfsWrite;
+  public void setCounters(Counters taskCounters) {
+    this.counters = taskCounters;
   }
 
-  public long getMapInputRecords() {
-    return mapInputRecords;
-  }
-
-  public void setMapInputRecords(long mapInputRecords) {
-    this.mapInputRecords = mapInputRecords;
-  }
-
-  public long getMapOutputRecords() {
-    return mapOutputRecords;
-  }
-
-  public void setMapOutputRecords(long mapOutputRecords) {
-    this.mapOutputRecords = mapOutputRecords;
-  }
-
-  public long getReduceInputRecords() {
-    return reduceInputRecords;
-  }
-
-  public void setReduceInputRecords(long reduceInputRecords) {
-    this.reduceInputRecords = reduceInputRecords;
-  }
-
-  public long getReduceOutputRecords() {
-    return reduceOutputRecords;
-  }
-
-  public void setReduceOutputRecords(long reduceOutputRecords) {
-    this.reduceOutputRecords = reduceOutputRecords;
-  }
-
-  public long getReduceShuffleBytes() {
-    return reduceShuffleBytes;
-  }
-
-  public void setReduceShuffleBytes(long reduceShuffleBytes) {
-    this.reduceShuffleBytes = reduceShuffleBytes;
+  public Counters getCounters() {
+    return this.counters;
   }
 
   public void setCpuMSec(long cpuMSec) {
@@ -160,12 +109,20 @@ public class MapRedStats {
       sb.append(" Accumulative CPU: " + (cpuMSec / 1000D) + " sec  ");
     }
 
-    if (hdfsRead >= 0) {
-      sb.append(" HDFS Read: " + hdfsRead);
-    }
-
-    if (hdfsWrite >= 0) {
-      sb.append(" HDFS Write: " + hdfsWrite);
+    if (counters != null) {
+      Counter hdfsReadCntr = counters.findCounter("FileSystemCounters",
+          "HDFS_BYTES_READ");
+      long hdfsRead;
+      if (hdfsReadCntr != null && (hdfsRead = hdfsReadCntr.getValue()) >= 0) {
+        sb.append(" HDFS Read: " + hdfsRead);
+      }
+
+      Counter hdfsWrittenCntr = counters.findCounter("FileSystemCounters",
+          "HDFS_BYTES_WRITTEN");
+      long hdfsWritten;
+      if (hdfsWrittenCntr != null && (hdfsWritten = hdfsWrittenCntr.getValue()) >= 0) {
+        sb.append(" HDFS Write: " + hdfsWritten);
+      }
     }
 
     sb.append(" " + (success ? "SUCESS" : "FAIL"));

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java?rev=1179493&r1=1179492&r2=1179493&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java Thu Oct  6 03:23:23 2011
@@ -52,10 +52,8 @@ import org.apache.hadoop.mapred.TaskComp
 import org.apache.hadoop.mapred.TaskReport;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.log4j.Appender;
-import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.FileAppender;
 import org.apache.log4j.LogManager;
-import org.apache.log4j.PropertyConfigurator;
 
 public class HadoopJobExecHelper {
 
@@ -393,52 +391,7 @@ public class HadoopJobExecHelper {
     }
 
     MapRedStats mapRedStats = new MapRedStats(numMap, numReduce, cpuMsec, success, rj.getID().toString());
-
-    if (ctrs != null) {
-      Counter ctr;
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "REDUCE_SHUFFLE_BYTES");
-      if (ctr != null) {
-        mapRedStats.setReduceShuffleBytes(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "MAP_INPUT_RECORDS");
-      if (ctr != null) {
-        mapRedStats.setMapInputRecords(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "MAP_OUTPUT_RECORDS");
-      if (ctr != null) {
-        mapRedStats.setMapOutputRecords(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "REDUCE_INPUT_RECORDS");
-      if (ctr != null) {
-        mapRedStats.setReduceInputRecords(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
-          "REDUCE_OUTPUT_RECORDS");
-      if (ctr != null) {
-        mapRedStats.setReduceOutputRecords(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("FileSystemCounters",
-          "HDFS_BYTES_READ");
-      if (ctr != null) {
-        mapRedStats.setHdfsRead(ctr.getValue());
-      }
-
-      ctr = ctrs.findCounter("FileSystemCounters",
-          "HDFS_BYTES_WRITTEN");
-      if (ctr != null) {
-        mapRedStats.setHdfsWrite(ctr.getValue());
-      }
-    }
+    mapRedStats.setCounters(ctrs);
 
     this.task.setDone();
     // update based on the final value of the counters



Mime
View raw message