hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mbau...@apache.org
Subject svn commit: r1362919 - in /hbase/branches/0.89-fb/src/main: java/org/apache/hadoop/hbase/io/hfile/ java/org/apache/hadoop/hbase/ipc/ java/org/apache/hadoop/hbase/regionserver/wal/ ruby/hbase/ ruby/shell/commands/
Date Wed, 18 Jul 2012 12:50:46 GMT
Author: mbautin
Date: Wed Jul 18 12:50:45 2012
New Revision: 1362919

URL: http://svn.apache.org/viewvc?rev=1362919&view=rev
Log:
[HBASE-6215] Per-request profiling - more metrics

Author: aurickq

Summary: added profiling for block cache hit/miss, HLog sync time, block paging time.

Test Plan: HBase shell

Reviewers: kranganathan, mbautin

Reviewed By: mbautin

CC: hbase-eng@

Differential Revision: https://phabricator.fb.com/D514785

Modified:
    hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
    hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
    hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java
    hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
    hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/ProfilingData.java
    hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
    hbase/branches/0.89-fb/src/main/ruby/hbase/table.rb
    hbase/branches/0.89-fb/src/main/ruby/shell/commands/get_profiling.rb

Modified: hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java?rev=1362919&r1=1362918&r2=1362919&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java (original)
+++ hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java Wed Jul 18 12:50:45 2012
@@ -36,6 +36,9 @@ import org.apache.hadoop.hbase.io.encodi
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
+import org.apache.hadoop.hbase.ipc.HBaseRPC;
+import org.apache.hadoop.hbase.ipc.ProfilingData;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IdLock;
 import org.apache.hadoop.io.WritableUtils;
@@ -300,6 +303,10 @@ public class HFileReaderV2 extends Abstr
                 "has wrong encoding: " + cachedBlock.getDataBlockEncoding() +
                 " (expected: " + dataBlockEncoder.getEncodingInCache() + ")");
           }
+          ProfilingData pData = HRegionServer.threadLocalProfilingData.get();
+          if (pData != null) {
+            pData.incInt(ProfilingData.DATA_BLOCK_HIT_CNT);
+          }
           return cachedBlock;
         }
         // Carry on, please load.
@@ -331,7 +338,11 @@ public class HFileReaderV2 extends Abstr
         cacheConf.getBlockCache().cacheBlock(cacheKey, hfileBlock,
             cacheConf.isInMemory());
       }
-
+      ProfilingData pData = HRegionServer.threadLocalProfilingData.get();
+      if (pData != null) {
+        pData.incInt(ProfilingData.DATA_BLOCK_MISS_CNT);
+        pData.incLong(ProfilingData.TOTAL_BLOCK_READ_TIME_NS, delta);
+      }
       return hfileBlock;
     } finally {
       offsetLock.releaseLockEntry(lockEntry);

Modified: hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java?rev=1362919&r1=1362918&r2=1362919&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (original)
+++ hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java Wed Jul 18 12:50:45 2012
@@ -130,6 +130,7 @@ public class HBaseClient {
     boolean done;                                 // true when call is done
     protected int version = HBaseServer.CURRENT_VERSION;
     public HBaseRPCOptions options;
+    public long startTime;
 
     protected Call(Writable param) {
       this.param = param;
@@ -519,6 +520,7 @@ public class HBaseClient {
             call.param.write(outOS);
             outOS.flush();
             baos.flush();
+            call.startTime = System.currentTimeMillis();
           } catch (IOException e) {
             LOG.error("Failed to prepare request in in-mem buffers!", e);
             markClosed(e);
@@ -566,6 +568,7 @@ public class HBaseClient {
         if (LOG.isDebugEnabled())
           LOG.debug(getName() + " got value #" + id);
         Call call = calls.get(id);
+        long totalTime = System.currentTimeMillis() - call.startTime;
         // 2. read the error boolean uncompressed
         boolean isError = localIn.readBoolean();
 
@@ -596,8 +599,15 @@ public class HBaseClient {
           if (call.getVersion() >= HBaseServer.VERSION_RPCOPTIONS) {
             boolean hasProfiling = localIn.readBoolean ();
             if (hasProfiling) {
-              call.options.profilingResult = new ProfilingData ();
+              call.options.profilingResult = new ProfilingData();
               call.options.profilingResult.readFields(localIn);
+              Long serverTimeObj = call.options.profilingResult.getLong(
+                  ProfilingData.TOTAL_SERVER_TIME_MS);
+              if (serverTimeObj != null) {
+                call.options.profilingResult.addLong(
+                    ProfilingData.CLIENT_NETWORK_LATENCY_MS, 
+                    totalTime - serverTimeObj);
+              }
             }
           }
           call.setValue(value);

Modified: hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java?rev=1362919&r1=1362918&r2=1362919&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java (original)
+++ hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java Wed Jul
18 12:50:45 2012
@@ -569,6 +569,10 @@ public class HBaseRPC {
           throw new IOException("Could not find requested method, the usual " +
               "cause is a version mismatch between client and server.");
         }
+        if (HRegionServer.threadLocalProfilingData.get () != null) {
+          HRegionServer.threadLocalProfilingData.get ().addString(
+              ProfilingData.RPC_METHOD_NAME, call.getMethodName ());
+        }
         if (verbose) trace("Call: " + call);
         Method method = implementation.getMethod(call.getMethodName(),
                 call.getParameterClasses());

Modified: hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java?rev=1362919&r1=1362918&r2=1362919&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java (original)
+++ hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java Wed
Jul 18 12:50:45 2012
@@ -242,6 +242,7 @@ public abstract class HBaseServer {
     
     protected boolean shouldProfile = false;
     protected ProfilingData profilingData = null;
+    protected String tag = null;
 
     public Call(int id, Writable param, Connection connection) {
       this.id = id;
@@ -250,6 +251,14 @@ public abstract class HBaseServer {
       this.timestamp = System.currentTimeMillis();
       this.response = null;
     }
+    
+    public void setTag (String tag) {
+      this.tag = tag;
+    }
+    
+    public String getTag () {
+      return tag;
+    }
 
     public void setVersion(int version) {
      this.version = version;
@@ -1020,6 +1029,7 @@ public abstract class HBaseServer {
       
       call.setRPCCompression(txCompression);
       call.setVersion(version);
+      call.setTag (options.getTag());
       callQueue.put(call);              // queue the call; maybe blocked here
     }
 
@@ -1084,7 +1094,8 @@ public abstract class HBaseServer {
             error = StringUtils.stringifyException(e);
           }
           long total = System.currentTimeMillis () - start;
-          call.profilingData.addLong("total_server_time.ms", total);
+          call.profilingData.addLong(
+              ProfilingData.TOTAL_SERVER_TIME_MS, total);
           UserGroupInformation.setCurrentUser(previous);
           CurCall.set(null);
           HRegionServer.threadLocalProfilingData.remove ();
@@ -1141,6 +1152,10 @@ public abstract class HBaseServer {
             		out.writeBoolean(true);
             		call.profilingData.write(out);
             	}
+            	if (call.profilingData != null) {
+                LOG.debug("Profiling info (" + call.getTag () + "): " + 
+                      call.profilingData.toString ());
+              }
             }
           } else {
             WritableUtils.writeString(out, errorClass);

Modified: hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/ProfilingData.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/ProfilingData.java?rev=1362919&r1=1362918&r2=1362919&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/ProfilingData.java (original)
+++ hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/ipc/ProfilingData.java Wed
Jul 18 12:50:45 2012
@@ -3,124 +3,272 @@ package org.apache.hadoop.hbase.ipc;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.commons.lang.mutable.MutableFloat;
+import org.apache.commons.lang.mutable.MutableInt;
+import org.apache.commons.lang.mutable.MutableLong;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.io.WritableWithSize;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.BytesWritable;
-
-/*
- * A map containing profiling data
- * only maps String->String to be pretty printable
+
+/**
+ * A map containing profiling data, mapping String to 
+ * String, Long, Int, Boolean, and Float. This class is
+ * not thread-safe.
  */
 
 public class ProfilingData implements Writable {
-
-	private MapWritable mapString = new MapWritable ();
-	private MapWritable mapLong = new MapWritable ();
-	private MapWritable mapInt = new MapWritable ();
-	private MapWritable mapBoolean = new MapWritable ();
-	private MapWritable mapFloat = new MapWritable ();
+  
+  /**
+   *  total amount of time spent server-side by the RPC
+   */
+  public static final String TOTAL_SERVER_TIME_MS = "total_server_time.ms";
+  
+  /**
+   *  client reported network latency
+   */
+  public static final String CLIENT_NETWORK_LATENCY_MS = "client_network_latency.ms";
+  
+  /**
+   *  number of data block hits on get
+   */
+  public static final String DATA_BLOCK_HIT_CNT = "data_block_hit_cnt";
+  
+  /**
+   *  number of data block misses on get
+   */
+  public static final String DATA_BLOCK_MISS_CNT = "data_block_miss_cnt";
+  
+  /**
+   *  total time spent reading data blocks into cache on misses
+   */
+  public static final String TOTAL_BLOCK_READ_TIME_NS = "total_block_read_time.ns";
+  
+  /**
+   *  time spend writing to HLog
+   */
+  public static final String HLOG_WRITE_TIME_MS = "hlog_write_time.ms";
+  
+  /**
+   *  time spent syncing HLog
+   */
+  public static final String HLOG_SYNC_TIME_MS = "hlog_sync_time.ms";
+  
+  /**
+   *  name of the rpc method called
+   */
+  public static final String RPC_METHOD_NAME = "rpc_method_name";
+
+	private Map<String, String> mapString = new HashMap<String, String> ();
+	private Map<String, MutableLong> mapLong = new HashMap<String, MutableLong> ();
+	private Map<String, MutableInt> mapInt = new HashMap<String, MutableInt> ();
+	private Map<String, Boolean> mapBoolean = new HashMap<String, Boolean> ();
+	private Map<String, MutableFloat> mapFloat = new HashMap<String, MutableFloat> ();
 
 	public ProfilingData () {}
 
 	public void addString (String key, String val) {
-		mapString.put (new BytesWritable (key.getBytes ()), 
-		    new BytesWritable (val.getBytes ()));
+		mapString.put(key, val);
 	}
 
 	public String getString (String key) {
-		return new String (((BytesWritable) mapString.get 
-		    (new BytesWritable (key.getBytes ()))).get ());
+	  return mapString.get(key);
 	}
 	
 	public void addLong (String key, long val) {
-    mapLong.put (new BytesWritable (key.getBytes ()), 
-        new LongWritable (val));
+    mapLong.put(key, new MutableLong(val));
+  }
+
+  public Long getLong (String key) {
+    MutableLong ret = mapLong.get(key);
+    if (ret == null) {
+      return null;
+    }
+    return ret.toLong();
+  }
+  
+  public void incLong (String key, long amt) {
+    MutableLong dat = mapLong.get(key);
+    if (dat == null) {
+      this.addLong (key, amt);
+    } else {
+      dat.add (amt);
+    }
+  }
+
+  public void incLong (String key) {
+    this.incLong (key, 1);
+  }
+
+  public void decLong (String key, long amt) {
+    this.incLong (key, -amt);
   }
 
-  public long getLong (String key) {
-    return ((LongWritable) mapLong.get 
-        (new BytesWritable (key.getBytes ()))).get ();
+  public void decLong (String key) {
+    this.decLong (key, 1);
   }
   
   public void addInt (String key, int val) {
-    mapInt.put (new BytesWritable (key.getBytes ()), 
-        new IntWritable (val));
+    mapInt.put (key, new MutableInt (val));
+  }
+
+  public Integer getInt (String key) {
+    MutableInt ret = mapInt.get(key);
+    if (ret == null) {
+      return null;
+    }
+    return ret.toInteger();
+  }
+
+  public void incInt (String key, int amt) {
+    MutableInt dat = mapInt.get(key);
+    if (dat == null) {
+      this.addInt (key, amt);
+    } else {
+      dat.add (amt);
+    }
+  }
+
+  public void incInt (String key) {
+    this.incInt (key, 1);
+  }
+
+  public void decInt (String key, int amt) {
+    this.incInt (key, -amt);
   }
 
-  public int getInt (String key) {
-    return ((IntWritable) mapInt.get 
-        (new BytesWritable (key.getBytes ()))).get ();
+  public void decInt (String key) {
+    this.decInt (key, 1);
   }
   
   public void addBoolean (String key, boolean val) {
-    mapBoolean.put (new BytesWritable (key.getBytes ()), 
-        new BooleanWritable (val));
+    mapBoolean.put (key, val);
   }
 
-  public boolean getBoolean (String key) {
-    return ((BooleanWritable) mapBoolean.get 
-        (new BytesWritable (key.getBytes ()))).get ();
+  public Boolean getBoolean (String key) {
+    return mapBoolean.get (key);
   }
   
   public void addFloat (String key, float val) {
-    mapFloat.put (new BytesWritable (key.getBytes ()), 
-        new FloatWritable (val));
+    mapFloat.put (key, new MutableFloat (val));
   }
 
-  public float getFloat (String key) {
-    return ((FloatWritable) mapFloat.get 
-        (new BytesWritable (key.getBytes ()))).get ();
+  public Float getFloat (String key) {
+    MutableFloat ret = mapFloat.get(key);
+    if (ret == null) {
+      return null;
+    }
+    return ret.toFloat();
+  }
+  
+  public void incFloat (String key, float amt) {
+    MutableFloat dat = mapFloat.get(key);
+    if (dat == null) {
+      this.addFloat (key, amt);
+    } else {
+      dat.add (amt);
+    }
+  }
+  
+  public void decFloat (String key, float amt) {
+    this.incFloat (key, -amt);
   }
 	
 	@Override
 	public void write(DataOutput out) throws IOException {
-	 	mapString.write (out);
-	 	mapBoolean.write (out);
-	 	mapInt.write (out);
-	 	mapLong.write (out);
-	 	mapFloat.write (out);
+	  out.writeInt(mapString.size ());
+	  for (Map.Entry<String,String> entry : mapString.entrySet ()) {
+      out.writeUTF(entry.getKey());
+      out.writeUTF(entry.getValue());
+    }
+	  out.writeInt(mapBoolean.size ());
+    for (Map.Entry<String,Boolean> entry : mapBoolean.entrySet ()) {
+      out.writeUTF(entry.getKey());
+      out.writeBoolean(entry.getValue());
+    }
+    out.writeInt(mapInt.size ());
+    for (Map.Entry<String,MutableInt> entry : mapInt.entrySet ()) {
+      out.writeUTF(entry.getKey());
+      out.writeInt(entry.getValue().intValue());
+    }
+    out.writeInt(mapLong.size ());
+    for (Map.Entry<String,MutableLong> entry : mapLong.entrySet ()) {
+      out.writeUTF(entry.getKey());
+      out.writeLong(entry.getValue().longValue());
+    }
    out.writeInt(mapFloat.size ());
+    for (Map.Entry<String,MutableFloat> entry : mapFloat.entrySet ()) {
+      out.writeUTF(entry.getKey());
+      out.writeFloat(entry.getValue().floatValue());
+    }
 	}
 	  
 	@Override
 	public void readFields(DataInput in) throws IOException {
-	  mapString.readFields (in);
-	  mapBoolean.readFields (in);
-	  mapInt.readFields (in);
-	  mapLong.readFields (in);
-	  mapFloat.readFields (in);
+	  int size;
+	  String key;
+	  size = in.readInt();
+	  mapString.clear ();
+    for (int i = 0; i < size; i ++) {
+      key = in.readUTF();
+      this.addString(key, in.readUTF());
+    }
+    size = in.readInt();
+    mapBoolean.clear ();
+    for (int i = 0; i < size; i ++) {
+      key = in.readUTF();
+      this.addBoolean(key, in.readBoolean());
+    }
+    size = in.readInt();
+    mapInt.clear ();
+    for (int i = 0; i < size; i ++) {
+      key = in.readUTF();
+      this.addInt(key, in.readInt());
+    }
+    size = in.readInt();
+    mapLong.clear ();
+    for (int i = 0; i < size; i ++) {
+      key = in.readUTF();
+      this.addLong(key, in.readLong());
+    }
+    size = in.readInt();
+    mapFloat.clear ();
+    for (int i = 0; i < size; i ++) {
+      key = in.readUTF();
+      this.addFloat(key, in.readFloat());
+    }
 	}
 	
-	@Override
-	public String toString () {
+	public String toString (String delim) {
 	  StringBuilder sb = new StringBuilder ();
-	  for (Map.Entry<Writable,Writable> entry : mapString.entrySet ()) {
-	    sb.append (new String (((BytesWritable) entry.getKey ()).get ()) + " : " 
-	        + new String (((BytesWritable) entry.getValue ()).get ()) + "\n");
-	  }
-	  for (Map.Entry<Writable,Writable> entry : mapBoolean.entrySet ()) {
-      sb.append (new String (((BytesWritable) entry.getKey ()).get ()) + " : " 
-          + ((BooleanWritable) entry.getValue ()).get () + "\n");
-    }
-	  for (Map.Entry<Writable,Writable> entry : mapInt.entrySet ()) {
-      sb.append (new String (((BytesWritable) entry.getKey ()).get ()) + " : " 
-          + ((IntWritable) entry.getValue ()).get () + "\n");
-    }
-	  for (Map.Entry<Writable,Writable> entry : mapLong.entrySet ()) {
-      sb.append (new String (((BytesWritable) entry.getKey ()).get ()) + " : " 
-          + ((LongWritable) entry.getValue ()).get () + "\n");
-    }
-	  for (Map.Entry<Writable,Writable> entry : mapFloat.entrySet ()) {
-      sb.append (new String (((BytesWritable) entry.getKey ()).get ()) + " : " 
-          + ((FloatWritable) entry.getValue ()).get () + "\n");
+    for (Map.Entry<String,String> entry : mapString.entrySet ()) {
+      sb.append (entry.getKey () + ":" + entry.getValue () + delim);
+    }
+    for (Map.Entry<String,Boolean> entry : mapBoolean.entrySet ()) {
+      sb.append (entry.getKey () + ":" + entry.getValue () + delim);
+    }
+    for (Map.Entry<String,MutableInt> entry : mapInt.entrySet ()) {
+      sb.append (entry.getKey () + ":" + entry.getValue () + delim);
+    }
+    for (Map.Entry<String,MutableLong> entry : mapLong.entrySet ()) {
+      sb.append (entry.getKey () + ":" + entry.getValue () + delim);
+    }
+    for (Map.Entry<String,MutableFloat> entry : mapFloat.entrySet ()) {
+      sb.append (entry.getKey () + ":" + entry.getValue () + delim);
     }
-	  return sb.toString ();
+    if (sb.length () >= delim.length ()) {
+      sb.delete(sb.length () - delim.length (), sb.length ());
+    }
+    return sb.toString ();
 	}
+	
+	@Override
+	public String toString () {
+	  return this.toString (", ");
+	}
+	
+	public String toPrettyString () {
+	  return this.toString ("\n");
+  }
 }

Modified: hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java?rev=1362919&r1=1362918&r2=1362919&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java (original)
+++ hbase/branches/0.89-fb/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java Wed Jul 18 12:50:45 2012
@@ -77,9 +77,11 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.ipc.ProfilingData;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -954,13 +956,21 @@ public class HLog implements Syncable {
       // Only count 1 row as an unflushed entry.
       txid = this.unflushedEntries.incrementAndGet();
     }
-    writeTime.inc(System.currentTimeMillis() - start);
+    long time = System.currentTimeMillis() - start;
+    writeTime.inc(time);
+    ProfilingData pData = HRegionServer.threadLocalProfilingData.get();
+    if (pData != null) {
+      pData.addLong(ProfilingData.HLOG_WRITE_TIME_MS, time);
+    }
 
     // sync txn to file system
     start = System.currentTimeMillis();
     this.sync(info.isMetaRegion(), txid);
-    gsyncTime.inc(System.currentTimeMillis() - start);
-
+    time = System.currentTimeMillis() - start;
+    gsyncTime.inc(time);
+    if (pData != null) {
+      pData.addLong(ProfilingData.HLOG_SYNC_TIME_MS, time);
+    }
   }
 
   /**

Modified: hbase/branches/0.89-fb/src/main/ruby/hbase/table.rb
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/ruby/hbase/table.rb?rev=1362919&r1=1362918&r2=1362919&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/main/ruby/hbase/table.rb (original)
+++ hbase/branches/0.89-fb/src/main/ruby/hbase/table.rb Wed Jul 18 12:50:45 2012
@@ -50,14 +50,14 @@ module Hbase
       @table.setProfiling(prof)
     end
 
-#----------------------------------------------------------------------------------------------
+    #----------------------------------------------------------------------------------------------
     # Get profiling data
     def get_profiling()
       data = @table.getProfilingData()
       if data == nil
         return nil
       else
-        return data.toString()
+        return data.toPrettyString()
       end
     end
     #----------------------------------------------------------------------------------------------

Modified: hbase/branches/0.89-fb/src/main/ruby/shell/commands/get_profiling.rb
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/main/ruby/shell/commands/get_profiling.rb?rev=1362919&r1=1362918&r2=1362919&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/main/ruby/shell/commands/get_profiling.rb (original)
+++ hbase/branches/0.89-fb/src/main/ruby/shell/commands/get_profiling.rb Wed Jul 18 12:50:45
2012
@@ -37,7 +37,6 @@ module Shell
         if shell.get_profiling() == nil
           puts "No profiling data."
         else
-          puts "Profiling data:"
           puts shell.get_profiling()
         end
       end



Mime
View raw message