hbase-commits mailing list archives

From apurt...@apache.org
Subject [1/4] hbase git commit: HBASE-18060 Backport to branch-1 HBASE-9774 HBase native metrics and metric collection for coprocessors
Date Wed, 24 May 2017 23:53:27 GMT
Repository: hbase
Updated Branches:
  refs/heads/branch-1 f2ba52ac4 -> a3c3f1012


http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java
index 8bca6c5..4c28763 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java
@@ -17,10 +17,14 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+import org.apache.hadoop.hbase.metrics.MetricRegistries;
+import org.apache.hadoop.hbase.metrics.MetricRegistry;
+import org.apache.hadoop.hbase.metrics.Timer;
+
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * <p>
@@ -36,11 +40,20 @@ public class MetricsRegionServer {
   private MetricsRegionServerSource serverSource;
   private MetricsRegionServerWrapper regionServerWrapper;
 
+  private MetricRegistry metricRegistry;
+  private Timer bulkLoadTimer;
+
   public MetricsRegionServer(MetricsRegionServerWrapper regionServerWrapper) {
     this(regionServerWrapper,
         CompatibilitySingletonFactory.getInstance(MetricsRegionServerSourceFactory.class)
             .createServer(regionServerWrapper));
 
+    // Create hbase-metrics module based metrics. The registry should already be registered by the
+    // MetricsRegionServerSource
+    metricRegistry = MetricRegistries.global().get(serverSource.getMetricRegistryInfo()).get();
+
+    // create and use metrics from the new hbase-metrics based registry.
+    bulkLoadTimer = metricRegistry.timer("Bulkload");
   }
 
   MetricsRegionServer(MetricsRegionServerWrapper regionServerWrapper,
@@ -131,4 +144,8 @@ public class MetricsRegionServer {
     serverSource.updateCompactionInputSize(isMajor, inputBytes);
     serverSource.updateCompactionOutputSize(isMajor, outputBytes);
   }
+
+  public void updateBulkLoad(long millis) {
+    this.bulkLoadTimer.updateMillis(millis);
+  }
 }

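The MetricsRegionServer change above is the first consumer of the new hbase-metrics API on branch-1: it looks up the registry that MetricsRegionServerSource has already registered with the global store and hangs a "Bulkload" Timer off it. Reduced to a standalone sketch (the class and method names here are invented for illustration; the API calls themselves are the ones the diff uses):

    import org.apache.hadoop.hbase.metrics.MetricRegistries;
    import org.apache.hadoop.hbase.metrics.MetricRegistry;
    import org.apache.hadoop.hbase.metrics.Timer;
    import org.apache.hadoop.hbase.regionserver.MetricsRegionServerSource;
    import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

    public class BulkLoadTimingSketch {
      private final Timer bulkLoadTimer;

      public BulkLoadTimingSketch(MetricsRegionServerSource serverSource) {
        // The source registers its registry with the global MetricRegistries store,
        // so the Optional returned by get() is expected to be present here.
        MetricRegistry registry =
            MetricRegistries.global().get(serverSource.getMetricRegistryInfo()).get();
        this.bulkLoadTimer = registry.timer("Bulkload");
      }

      public void recordBulkLoad(long startMillis) {
        // Timers in the new API accept plain millisecond durations.
        bulkLoadTimer.updateMillis(EnvironmentEdgeManager.currentTime() - startMillis);
      }
    }
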
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 9b270c6..3f23d2b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -197,6 +197,8 @@ import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
 import org.apache.zookeeper.KeeperException;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * Implements the regionserver RPC services.
  */
@@ -2061,6 +2063,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   @Override
   public BulkLoadHFileResponse bulkLoadHFile(final RpcController controller,
       final BulkLoadHFileRequest request) throws ServiceException {
+    long start = EnvironmentEdgeManager.currentTime();
     try {
       checkOpen();
       requestCount.increment();
@@ -2089,6 +2092,11 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
       return builder.build();
     } catch (IOException ie) {
       throw new ServiceException(ie);
+    } finally {
+      if (regionServer.metricsRegionServer != null) {
+        regionServer.metricsRegionServer.updateBulkLoad(
+            EnvironmentEdgeManager.currentTime() - start);
+      }
     }
   }
 

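The bulkLoadHFile change takes its start timestamp before checkOpen() and records the elapsed time in a finally block, so rejected and failed requests get timed along with successful ones. The same pattern as a hedged standalone sketch (TimedCallSketch and its Runnable parameter are hypothetical; Timer and EnvironmentEdgeManager are the classes used above):

    import org.apache.hadoop.hbase.metrics.Timer;
    import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

    public class TimedCallSketch {
      private final Timer timer;

      public TimedCallSketch(Timer timer) {
        this.timer = timer;
      }

      public void call(Runnable work) {
        long start = EnvironmentEdgeManager.currentTime();
        try {
          work.run();
        } finally {
          // Record the latency whether or not the call threw.
          timer.updateMillis(EnvironmentEdgeManager.currentTime() - start);
        }
      }
    }
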
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 74ecbc8..3ecd970 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
 import org.apache.hadoop.hbase.coprocessor.EndpointObserver;
+import org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
@@ -70,6 +71,8 @@ import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.ipc.RpcServer;
+import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.regionserver.Region.Operation;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
@@ -85,6 +88,11 @@ import com.google.common.collect.Lists;
 import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import com.google.protobuf.Message;
+import com.google.protobuf.Service;
+
 /**
  * Implements the coprocessor environment and runtime support for coprocessors
  * loaded within a {@link Region}.
@@ -103,7 +111,7 @@ public class RegionCoprocessorHost
   private final boolean hasCustomPostScannerFilterRow;
 
   /**
-   * 
+   *
    * Encapsulation of the environment of each coprocessor
    */
   static class RegionEnvironment extends CoprocessorHost.Environment
@@ -114,6 +122,7 @@ public class RegionCoprocessorHost
     ConcurrentMap<String, Object> sharedData;
     private final boolean useLegacyPre;
     private final boolean useLegacyPost;
+    private final MetricRegistry metricRegistry;
 
     /**
      * Constructor
@@ -135,6 +144,8 @@ public class RegionCoprocessorHost
           HRegionInfo.class, WALKey.class, WALEdit.class);
       useLegacyPost = useLegacyMethod(impl.getClass(), "postWALRestore", ObserverContext.class,
           HRegionInfo.class, WALKey.class, WALEdit.class);
+      this.metricRegistry =
+          MetricsCoprocessor.createRegistryForRegionCoprocessor(impl.getClass().getName());
     }
 
     /** @return the region */
@@ -151,6 +162,7 @@ public class RegionCoprocessorHost
 
     public void shutdown() {
       super.shutdown();
+      MetricsCoprocessor.removeRegistry(this.metricRegistry);
     }
 
     @Override
@@ -163,6 +175,10 @@ public class RegionCoprocessorHost
       return region.getRegionInfo();
     }
 
+    @Override
+    public MetricRegistry getMetricRegistryForRegionServer() {
+      return metricRegistry;
+    }
   }
 
   static class TableCoprocessorAttribute {
@@ -358,7 +374,7 @@ public class RegionCoprocessorHost
     // scan the table attributes for coprocessor load specifications
     // initialize the coprocessors
     List<RegionEnvironment> configured = new ArrayList<RegionEnvironment>();
-    for (TableCoprocessorAttribute attr: getTableCoprocessorAttrsFromSchema(conf, 
+    for (TableCoprocessorAttribute attr: getTableCoprocessorAttrsFromSchema(conf,
         region.getTableDesc())) {
       // Load encompasses classloading and coprocessor initialization
       try {

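With RegionEnvironment now exposing getMetricRegistryForRegionServer(), a region coprocessor can create metrics in start() and rely on the host to drop the registry when the environment shuts down. A minimal sketch (observer and counter names are invented; the complete end-to-end examples are in TestCoprocessorMetrics further down):

    import java.io.IOException;

    import org.apache.hadoop.hbase.CoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.hadoop.hbase.metrics.Counter;
    import org.apache.hadoop.hbase.metrics.MetricRegistry;

    public class ExampleRegionObserver extends BaseRegionObserver {
      private Counter exampleRequests;

      @Override
      public void start(CoprocessorEnvironment env) throws IOException {
        super.start(env);
        if (env instanceof RegionCoprocessorEnvironment) {
          // Per-coprocessor registry created by the RegionEnvironment above; it is
          // unregistered again in RegionEnvironment.shutdown().
          MetricRegistry registry =
              ((RegionCoprocessorEnvironment) env).getMetricRegistryForRegionServer();
          if (exampleRequests == null) {
            exampleRequests = registry.counter("exampleRequests");
          }
        }
      }
    }
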
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
index bb27dd2..1b64ab8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
@@ -25,22 +25,27 @@ import java.util.List;
 import org.apache.commons.lang.ClassUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.MetaMutationAnnotation;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
 import org.apache.hadoop.hbase.coprocessor.SingletonCoprocessorService;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
+
+import org.apache.hadoop.hbase.ipc.RpcServer;
+import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
 
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
@@ -324,8 +329,8 @@ public class RegionServerCoprocessorHost extends
    */
   static class RegionServerEnvironment extends CoprocessorHost.Environment
       implements RegionServerCoprocessorEnvironment {
-
-    private RegionServerServices regionServerServices;
+    private final RegionServerServices regionServerServices;
+    private final MetricRegistry metricRegistry;
 
     @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="BC_UNCONFIRMED_CAST",
         justification="Intentional; FB has trouble detecting isAssignableFrom")
@@ -342,12 +347,25 @@ public class RegionServerCoprocessorHost extends
           break;
         }
       }
+      this.metricRegistry =
+          MetricsCoprocessor.createRegistryForRSCoprocessor(implClass.getName());
     }
 
     @Override
     public RegionServerServices getRegionServerServices() {
       return regionServerServices;
     }
+
+    @Override
+    public MetricRegistry getMetricRegistryForRegionServer() {
+      return metricRegistry;
+    }
+
+    @Override
+    protected void shutdown() {
+      super.shutdown();
+      MetricsCoprocessor.removeRegistry(metricRegistry);
+    }
   }
 
   /**
@@ -356,6 +374,7 @@ public class RegionServerCoprocessorHost extends
    */
   static class EnvironmentPriorityComparator implements
       Comparator<CoprocessorEnvironment> {
+    @Override
     public int compare(final CoprocessorEnvironment env1,
         final CoprocessorEnvironment env2) {
       if (env1.getPriority() < env2.getPriority()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
index 87019e8..c726001 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
@@ -29,6 +29,12 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.coprocessor.*;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.WALCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.WALObserver;
+import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALKey;
 
@@ -50,6 +56,7 @@ public class WALCoprocessorHost
 
     final boolean useLegacyPre;
     final boolean useLegacyPost;
+    private final MetricRegistry metricRegistry;
 
     @Override
     public WAL getWAL() {
@@ -78,6 +85,18 @@ public class WALCoprocessorHost
           HRegionInfo.class, WALKey.class, WALEdit.class);
       useLegacyPost = useLegacyMethod(impl.getClass(), "postWALWrite", ObserverContext.class,
           HRegionInfo.class, WALKey.class, WALEdit.class);
+      this.metricRegistry = MetricsCoprocessor.createRegistryForWALCoprocessor(implClass.getName());
+    }
+
+    @Override
+    public MetricRegistry getMetricRegistryForRegionServer() {
+      return metricRegistry;
+    }
+
+    @Override
+    protected void shutdown() {
+      super.shutdown();
+      MetricsCoprocessor.removeRegistry(this.metricRegistry);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
index a647d03..e808570 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.hadoop.hbase.metrics.MetricRegistryInfo;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 
 /**
@@ -338,4 +339,10 @@ public class MetricsSource implements BaseSource {
   public String getMetricsName() {
     return globalSourceSource.getMetricsName();
   }
+
+  @Override
+  public MetricRegistryInfo getMetricRegistryInfo() {
+    return new MetricRegistryInfo(getMetricsName(), getMetricsDescription(),
+      getMetricsContext(), getMetricsJmxContext(), true);
+  }
 }

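MetricsSource now implements getMetricRegistryInfo(), which gives callers the key needed to locate its registry in the global MetricRegistries store, the same lookup MetricsRegionServer performs above. A small sketch (the helper class is invented; the Optional may legitimately be absent if no registry was registered under that info):

    import com.google.common.base.Optional;

    import org.apache.hadoop.hbase.metrics.MetricRegistries;
    import org.apache.hadoop.hbase.metrics.MetricRegistry;
    import org.apache.hadoop.hbase.replication.regionserver.MetricsSource;

    public class ReplicationMetricsLookup {
      /** Returns the registry backing the given replication source, if one is registered. */
      public static Optional<MetricRegistry> registryFor(MetricsSource source) {
        return MetricRegistries.global().get(source.getMetricRegistryInfo());
      }
    }
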
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index 706d8e7..af89d3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -660,6 +660,16 @@ public class MiniHBaseCluster extends HBaseCluster {
     return hbaseCluster.getRegionServer(serverNumber);
   }
 
+  public HRegionServer getRegionServer(ServerName serverName) {
+    for (RegionServerThread t : hbaseCluster.getRegionServers()) {
+      HRegionServer r = t.getRegionServer();
+      if (r.getServerName().equals(serverName)) {
+        return r;
+      }
+    }
+    return null;
+  }
+
   public List<HRegion> getRegions(byte[] tableName) {
     return getRegions(TableName.valueOf(tableName));
   }

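The new MiniHBaseCluster#getRegionServer(ServerName) overload lets tests resolve a ServerName, for example taken from an HRegionLocation, back to the live HRegionServer instance; TestCoprocessorMetrics uses it when waiting for a closed region to go offline. A tiny usage sketch (helper name invented; the method returns null when no live server matches):

    import org.apache.hadoop.hbase.HRegionLocation;
    import org.apache.hadoop.hbase.MiniHBaseCluster;
    import org.apache.hadoop.hbase.regionserver.HRegionServer;

    public class MiniClusterLookupSketch {
      /** Resolves the server hosting the given location; null if it is not in this cluster. */
      public static HRegionServer serverFor(MiniHBaseCluster cluster, HRegionLocation loc) {
        return cluster.getRegionServer(loc.getServerName());
      }
    }
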
http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java
new file mode 100644
index 0000000..6c4562e
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java
@@ -0,0 +1,549 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.coprocessor;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter.Predicate;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+import org.apache.hadoop.hbase.metrics.Counter;
+import org.apache.hadoop.hbase.metrics.Metric;
+import org.apache.hadoop.hbase.metrics.MetricRegistries;
+import org.apache.hadoop.hbase.metrics.MetricRegistry;
+import org.apache.hadoop.hbase.metrics.MetricRegistryInfo;
+import org.apache.hadoop.hbase.metrics.Timer;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.Lists;
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+/**
+ * Testing of coprocessor metrics end-to-end.
+ */
+@Category({CoprocessorTests.class, MediumTests.class})
+public class TestCoprocessorMetrics {
+
+  private static final Log LOG = LogFactory.getLog(TestCoprocessorMetrics.class);
+  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+
+  private static final byte[] foo = Bytes.toBytes("foo");
+  private static final byte[] bar = Bytes.toBytes("bar");
+  /**
+   * MasterObserver that has a Timer metric for create table operation.
+   */
+  public static class CustomMasterObserver extends BaseMasterObserver {
+    private Timer createTableTimer;
+    private long start = Long.MIN_VALUE;
+
+    @Override
+    public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                               HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+      super.preCreateTable(ctx, desc, regions);
+
+      // we rely on the fact that there is only 1 instance of our MasterObserver
+      this.start = System.currentTimeMillis();
+    }
+
+    @Override
+    public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                                HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+      super.postCreateTable(ctx, desc, regions);
+      if (this.start > 0) {
+        long time = System.currentTimeMillis() - start;
+        LOG.info("Create table took: " + time);
+        createTableTimer.updateMillis(time);
+      }
+    }
+
+    @Override
+    public void start(CoprocessorEnvironment env) throws IOException {
+      super.start(env);
+      if (env instanceof MasterCoprocessorEnvironment) {
+        MetricRegistry registry =
+            ((MasterCoprocessorEnvironment) env).getMetricRegistryForMaster();
+
+        createTableTimer  = registry.timer("CreateTable");
+      }
+    }
+  }
+
+  /**
+   * RegionServerObserver that has a Counter for rollWAL requests.
+   */
+  public static class CustomRegionServerObserver extends BaseRegionServerObserver {
+    /** This is the Counter metric object to keep track of the current count across invocations */
+    private Counter rollWALCounter;
+    @Override
+    public void postRollWALWriterRequest(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+        throws IOException {
+      // Increment the Counter whenever the coprocessor is called
+      rollWALCounter.increment();
+      super.postRollWALWriterRequest(ctx);
+    }
+
+    @Override
+    public void start(CoprocessorEnvironment env) throws IOException {
+      super.start(env);
+      if (env instanceof RegionServerCoprocessorEnvironment) {
+        MetricRegistry registry =
+            ((RegionServerCoprocessorEnvironment) env).getMetricRegistryForRegionServer();
+
+        if (rollWALCounter == null) {
+          rollWALCounter = registry.counter("rollWALRequests");
+        }
+      }
+    }
+  }
+
+  /**
+   * WALObserver that has a Counter for walEdits written.
+   */
+  public static class CustomWALObserver extends BaseWALObserver {
+    private Counter walEditsCount;
+
+    @Override
+    public void postWALWrite(ObserverContext<? extends WALCoprocessorEnvironment> ctx,
+                             HRegionInfo info, org.apache.hadoop.hbase.wal.WALKey logKey,
+                             WALEdit logEdit) throws IOException {
+      super.postWALWrite(ctx, info, logKey, logEdit);
+      walEditsCount.increment();
+    }
+
+    @Override
+    public void start(CoprocessorEnvironment env) throws IOException {
+      super.start(env);
+      if (env instanceof WALCoprocessorEnvironment) {
+        MetricRegistry registry =
+            ((WALCoprocessorEnvironment) env).getMetricRegistryForRegionServer();
+
+        if (walEditsCount == null) {
+          walEditsCount = registry.counter("walEditsCount");
+        }
+      }
+    }
+  }
+
+  /**
+   * RegionObserver that has a Counter for preGet()
+   */
+  public static class CustomRegionObserver extends BaseRegionObserver {
+    private Counter preGetCounter;
+
+    @Override
+    public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get,
+                         List<Cell> results) throws IOException {
+      super.preGetOp(e, get, results);
+      preGetCounter.increment();
+    }
+
+    @Override
+    public void start(CoprocessorEnvironment env) throws IOException {
+      super.start(env);
+
+      if (env instanceof RegionCoprocessorEnvironment) {
+        MetricRegistry registry =
+            ((RegionCoprocessorEnvironment) env).getMetricRegistryForRegionServer();
+
+        if (preGetCounter == null) {
+          preGetCounter = registry.counter("preGetRequests");
+        }
+      }
+    }
+  }
+
+  public static class CustomRegionObserver2 extends CustomRegionObserver {
+  }
+
+  /**
+   * RegionEndpoint to test metrics from endpoint calls
+   */
+  public static class CustomRegionEndpoint extends MultiRowMutationEndpoint {
+
+    private Timer endpointExecution;
+
+    @Override
+    public void mutateRows(RpcController controller, MutateRowsRequest request,
+                           RpcCallback<MutateRowsResponse> done) {
+      long start = System.nanoTime();
+      super.mutateRows(controller, request, done);
+      endpointExecution.updateNanos(System.nanoTime() - start);
+    }
+
+    @Override
+    public void start(CoprocessorEnvironment env) throws IOException {
+      super.start(env);
+
+      if (env instanceof RegionCoprocessorEnvironment) {
+        MetricRegistry registry =
+            ((RegionCoprocessorEnvironment) env).getMetricRegistryForRegionServer();
+
+        if (endpointExecution == null) {
+          endpointExecution = registry.timer("EndpointExecution");
+        }
+      }
+    }
+  }
+
+  @BeforeClass
+  public static void setupBeforeClass() throws Exception {
+    Configuration conf = UTIL.getConfiguration();
+    // inject master, regionserver and WAL coprocessors
+    conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
+        CustomMasterObserver.class.getName());
+    conf.set(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY,
+        CustomRegionServerObserver.class.getName());
+    conf.set(CoprocessorHost.WAL_COPROCESSOR_CONF_KEY,
+        CustomWALObserver.class.getName());
+    conf.setBoolean(CoprocessorHost.ABORT_ON_ERROR_KEY, true);
+    UTIL.startMiniCluster();
+  }
+
+  @AfterClass
+  public static void teardownAfterClass() throws Exception {
+    UTIL.shutdownMiniCluster();
+  }
+
+  @Before
+  public void setup() throws IOException {
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      for (HTableDescriptor htd : admin.listTables()) {
+        UTIL.deleteTable(htd.getTableName());
+      }
+    }
+  }
+
+  @Test
+  public void testMasterObserver() throws IOException {
+    // Find out the MetricRegistry used by the CP using the global registries
+    MetricRegistryInfo info = MetricsCoprocessor.createRegistryInfoForMasterCoprocessor(
+        CustomMasterObserver.class.getName());
+    Optional<MetricRegistry> registry =  MetricRegistries.global().get(info);
+    assertTrue(registry.isPresent());
+
+    Optional<Metric> metric = registry.get().get("CreateTable");
+    assertTrue(metric.isPresent());
+
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+
+      Timer createTableTimer = (Timer)metric.get();
+      long prevCount = createTableTimer.getHistogram().getCount();
+      LOG.info("Creating table");
+      admin.createTable(
+          new HTableDescriptor("testMasterObserver")
+              .addFamily(new HColumnDescriptor("foo")));
+
+      assertEquals(1, createTableTimer.getHistogram().getCount() - prevCount);
+    }
+  }
+
+  @Test
+  public void testRegionServerObserver() throws IOException {
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      LOG.info("Rolling WALs");
+      admin.rollWALWriter(UTIL.getMiniHBaseCluster().getServerHoldingMeta());
+    }
+
+    // Find out the MetricRegistry used by the CP using the global registries
+    MetricRegistryInfo info = MetricsCoprocessor.createRegistryInfoForRSCoprocessor(
+        CustomRegionServerObserver.class.getName());
+
+    Optional<MetricRegistry> registry =  MetricRegistries.global().get(info);
+    assertTrue(registry.isPresent());
+
+    Optional<Metric> metric = registry.get().get("rollWALRequests");
+    assertTrue(metric.isPresent());
+
+    Counter rollWalRequests = (Counter)metric.get();
+    assertEquals(1, rollWalRequests.getCount());
+  }
+
+  @Test
+  public void testWALObserver() throws IOException {
+    // Find out the MetricRegistry used by the CP using the global registries
+    MetricRegistryInfo info = MetricsCoprocessor.createRegistryInfoForWALCoprocessor(
+        CustomWALObserver.class.getName());
+
+    Optional<MetricRegistry> registry =  MetricRegistries.global().get(info);
+    assertTrue(registry.isPresent());
+
+    Optional<Metric> metric = registry.get().get("walEditsCount");
+    assertTrue(metric.isPresent());
+
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      admin.createTable(
+          new HTableDescriptor("testWALObserver")
+              .addFamily(new HColumnDescriptor("foo")));
+
+      Counter rollWalRequests = (Counter)metric.get();
+      long prevCount = rollWalRequests.getCount();
+      assertTrue(prevCount > 0);
+
+      try (Table table = connection.getTable(TableName.valueOf("testWALObserver"))) {
+        table.put(new Put(foo).addColumn(foo, foo, foo));
+      }
+
+      assertEquals(1, rollWalRequests.getCount() - prevCount);
+    }
+  }
+
+  /**
+   * Helper for below tests
+   */
+  private void assertPreGetRequestsCounter(Class<?> coprocClass) {
+    // Find out the MetricRegistry used by the CP using the global registries
+    MetricRegistryInfo info = MetricsCoprocessor.createRegistryInfoForRegionCoprocessor(
+        coprocClass.getName());
+
+    Optional<MetricRegistry> registry =  MetricRegistries.global().get(info);
+    assertTrue(registry.isPresent());
+
+    Optional<Metric> metric = registry.get().get("preGetRequests");
+    assertTrue(metric.isPresent());
+
+    Counter preGetRequests = (Counter)metric.get();
+    assertEquals(2, preGetRequests.getCount());
+  }
+
+  @Test
+  public void testRegionObserverSingleRegion() throws IOException {
+    TableName tableName = TableName.valueOf("testRegionObserverSingleRegion");
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      admin.createTable(
+          new HTableDescriptor(tableName)
+              .addFamily(new HColumnDescriptor(foo))
+              // add the coprocessor for the region
+              .addCoprocessor(CustomRegionObserver.class.getName()));
+      try (Table table = connection.getTable(tableName)) {
+        table.get(new Get(foo));
+        table.get(new Get(foo)); // 2 gets
+      }
+    }
+
+    assertPreGetRequestsCounter(CustomRegionObserver.class);
+  }
+
+  @Test
+  public void testRegionObserverMultiRegion() throws IOException {
+    TableName tableName = TableName.valueOf("testRegionObserverMultiRegion");
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      admin.createTable(
+          new HTableDescriptor(tableName)
+              .addFamily(new HColumnDescriptor(foo))
+              // add the coprocessor for the region
+              .addCoprocessor(CustomRegionObserver.class.getName())
+          , new byte[][]{foo}); // create with 2 regions
+      try (Table table = connection.getTable(tableName);
+           RegionLocator locator = connection.getRegionLocator(tableName)) {
+        table.get(new Get(bar));
+        table.get(new Get(foo)); // 2 gets to 2 separate regions
+        assertEquals(2, locator.getAllRegionLocations().size());
+        assertNotEquals(locator.getRegionLocation(bar).getRegionInfo(),
+            locator.getRegionLocation(foo).getRegionInfo());
+      }
+    }
+
+    assertPreGetRequestsCounter(CustomRegionObserver.class);
+  }
+
+  @Test
+  public void testRegionObserverMultiTable() throws IOException {
+    TableName tableName1 = TableName.valueOf("testRegionObserverMultiTable1");
+    TableName tableName2 = TableName.valueOf("testRegionObserverMultiTable2");
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      admin.createTable(
+          new HTableDescriptor(tableName1)
+              .addFamily(new HColumnDescriptor(foo))
+              // add the coprocessor for the region
+              .addCoprocessor(CustomRegionObserver.class.getName()));
+      admin.createTable(
+          new HTableDescriptor(tableName2)
+              .addFamily(new HColumnDescriptor(foo))
+              // add the coprocessor for the region
+              .addCoprocessor(CustomRegionObserver.class.getName()));
+      try (Table table1 = connection.getTable(tableName1);
+           Table table2 = connection.getTable(tableName2);) {
+        table1.get(new Get(bar));
+        table2.get(new Get(foo)); // 2 gets to 2 separate tables
+      }
+    }
+    assertPreGetRequestsCounter(CustomRegionObserver.class);
+  }
+
+  @Test
+  public void testRegionObserverMultiCoprocessor() throws IOException {
+    TableName tableName = TableName.valueOf("testRegionObserverMultiCoprocessor");
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      admin.createTable(
+          new HTableDescriptor(tableName)
+              .addFamily(new HColumnDescriptor(foo))
+              // add the coprocessor for the region. We add two different coprocessors
+              .addCoprocessor(CustomRegionObserver.class.getName())
+              .addCoprocessor(CustomRegionObserver2.class.getName()));
+      try (Table table = connection.getTable(tableName)) {
+        table.get(new Get(foo));
+        table.get(new Get(foo)); // 2 gets
+      }
+    }
+
+    // we will have two counters coming from two coprocs, in two different MetricRegistries
+    assertPreGetRequestsCounter(CustomRegionObserver.class);
+    assertPreGetRequestsCounter(CustomRegionObserver2.class);
+  }
+
+  @Test
+  public void testRegionObserverAfterRegionClosed() throws IOException {
+    TableName tableName = TableName.valueOf("testRegionObserverAfterRegionClosed");
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      admin.createTable(
+          new HTableDescriptor(tableName)
+              .addFamily(new HColumnDescriptor(foo))
+              // add the coprocessor for the region
+              .addCoprocessor(CustomRegionObserver.class.getName())
+          , new byte[][]{foo}); // create with 2 regions
+      try (Table table = connection.getTable(tableName)) {
+        table.get(new Get(foo));
+        table.get(new Get(foo)); // 2 gets
+      }
+
+      assertPreGetRequestsCounter(CustomRegionObserver.class);
+
+      // close one of the regions
+      try (RegionLocator locator = connection.getRegionLocator(tableName)) {
+        final HRegionLocation loc = locator.getRegionLocation(foo);
+        admin.closeRegion(loc.getServerName(), loc.getRegionInfo());
+
+        final HRegionServer server = UTIL.getMiniHBaseCluster().getRegionServer(loc.getServerName());
+        UTIL.waitFor(30000,new Predicate<IOException>() {
+          @Override
+          public boolean evaluate() throws IOException {
+            return server.getOnlineRegion(loc.getRegionInfo().getRegionName()) == null;
+          }
+        });
+        assertNull(server.getOnlineRegion(loc.getRegionInfo().getRegionName()));
+      }
+
+      // with only 1 region remaining, we should still be able to find the Counter
+      assertPreGetRequestsCounter(CustomRegionObserver.class);
+
+      // close the table
+      admin.disableTable(tableName);
+
+      MetricRegistryInfo info = MetricsCoprocessor.createRegistryInfoForRegionCoprocessor(
+          CustomRegionObserver.class.getName());
+
+      // ensure that MetricRegistry is deleted
+      Optional<MetricRegistry> registry =  MetricRegistries.global().get(info);
+      assertFalse(registry.isPresent());
+    }
+  }
+
+  @Test
+  public void testRegionObserverEndpoint() throws IOException, ServiceException {
+    TableName tableName = TableName.valueOf("testRegionObserverEndpoint");
+    try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
+         Admin admin = connection.getAdmin()) {
+      admin.createTable(
+          new HTableDescriptor(tableName)
+              .addFamily(new HColumnDescriptor(foo))
+              // add the coprocessor for the region
+              .addCoprocessor(CustomRegionEndpoint.class.getName()));
+
+      try (Table table = connection.getTable(tableName)) {
+        List<Put> mutations = Lists.newArrayList(new Put(foo), new Put(bar));
+        MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
+
+        for (Mutation mutation : mutations) {
+          mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(
+              ClientProtos.MutationProto.MutationType.PUT, mutation));
+        }
+
+        CoprocessorRpcChannel channel = table.coprocessorService(bar);
+        MultiRowMutationService.BlockingInterface service =
+            MultiRowMutationService.newBlockingStub(channel);
+        MutateRowsRequest mrm = mrmBuilder.build();
+        service.mutateRows(null, mrm);
+      }
+    }
+
+    // Find out the MetricRegistry used by the CP using the global registries
+    MetricRegistryInfo info = MetricsCoprocessor.createRegistryInfoForRegionCoprocessor(
+        CustomRegionEndpoint.class.getName());
+
+    Optional<MetricRegistry> registry =  MetricRegistries.global().get(info);
+    assertTrue(registry.isPresent());
+
+    Optional<Metric> metric = registry.get().get("EndpointExecution");
+    assertTrue(metric.isPresent());
+
+    Timer endpointExecutions = (Timer)metric.get();
+    assertEquals(1, endpointExecutions.getHistogram().getCount());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
index 50bc589..6a659cb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
 import org.apache.hadoop.hbase.ipc.RpcServerInterface;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
+import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
@@ -217,6 +218,11 @@ public class TestTokenAuthentication {
         public ConcurrentMap<String, Object> getSharedData() { return null; }
 
         @Override
+        public MetricRegistry getMetricRegistryForRegionServer() {
+          return null;
+        }
+
+        @Override
         public int getVersion() { return 0; }
 
         @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/a3c3f101/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a1639fb..1e45249 100644
--- a/pom.xml
+++ b/pom.xml
@@ -81,6 +81,8 @@
     <module>hbase-external-blockcache</module>
     <module>hbase-shaded</module>
     <module>hbase-archetypes</module>
+    <module>hbase-metrics-api</module>
+    <module>hbase-metrics</module>
   </modules>
   <!--Add apache snapshots in case we want to use unreleased versions of plugins:
       e.g. surefire 2.18-SNAPSHOT-->
@@ -1430,6 +1432,30 @@
         <groupId>org.apache.hbase</groupId>
         <version>${project.version}</version>
       </dependency>
+      <dependency>
+        <artifactId>hbase-metrics-api</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <artifactId>hbase-metrics-api</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <version>${project.version}</version>
+        <type>test-jar</type>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <artifactId>hbase-metrics</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <artifactId>hbase-metrics</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <version>${project.version}</version>
+        <type>test-jar</type>
+        <scope>test</scope>
+      </dependency>
       <!-- General dependencies -->
       <dependency>
         <groupId>com.github.stephenc.findbugs</groupId>
@@ -1460,6 +1486,11 @@
         <version>${metrics-core.version}</version>
       </dependency>
       <dependency>
+        <groupId>io.dropwizard.metrics</groupId>
+        <artifactId>metrics-core</artifactId>
+        <version>3.1.2</version>
+      </dependency>
+      <dependency>
         <groupId>com.google.guava</groupId>
         <artifactId>guava</artifactId>
         <version>${guava.version}</version>

