ambari-commits mailing list archives

From d...@apache.org
Subject ambari git commit: AMBARI-14051 Remove httpclient library dep from AbstractTimelineMetricsSink to respect timeout settings (dsen)
Date Tue, 24 Nov 2015 21:40:26 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 043bbf9bf -> a8e46494c


AMBARI-14051 Remove httpclient library dep from AbstractTimelineMetricsSink to respect timeout settings (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8e46494
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8e46494
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8e46494

Branch: refs/heads/branch-2.1
Commit: a8e46494ce935f042c4f9dd3bc7db7557ad9ac62
Parents: 043bbf9
Author: Dmytro Sen <dsen@apache.org>
Authored: Tue Nov 24 23:31:45 2015 +0200
Committer: Dmytro Sen <dsen@apache.org>
Committed: Tue Nov 24 23:38:44 2015 +0200

----------------------------------------------------------------------
 ambari-metrics/ambari-metrics-common/pom.xml    | 20 +++++---
 .../timeline/AbstractTimelineMetricsSink.java   | 38 +++++++++-------
 .../cache/HandleConnectExceptionTest.java       | 48 ++++++++++++--------
 .../flume/FlumeTimelineMetricsSinkTest.java     |  5 +-
 .../ambari-metrics-hadoop-sink/pom.xml          | 16 ++++---
 .../timeline/HadoopTimelineMetricsSink.java     |  4 --
 .../timeline/HadoopTimelineMetricsSinkTest.java | 19 ++++----
 .../ambari-metrics-kafka-sink/pom.xml           |  2 +-
 .../kafka/KafkaTimelineMetricsReporter.java     |  6 ---
 .../kafka/KafkaTimelineMetricsReporterTest.java | 14 +++---
 .../ambari-metrics-storm-sink/pom.xml           |  5 --
 .../sink/storm/StormTimelineMetricsSink.java    |  3 --
 .../storm/StormTimelineMetricsSinkTest.java     | 15 ++----
 13 files changed, 95 insertions(+), 100 deletions(-)
----------------------------------------------------------------------

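For reference, the substance of the change is a switch from commons-httpclient 3.x, whose timeout settings were not being respected for these POSTs (see the removed setSoTimeout/setConnectionManagerTimeout calls below), to the JDK's HttpURLConnection, which applies both a connect timeout and a read timeout. The sketch below is illustrative only and not part of the patch; the class name, the sample collector endpoint, and the empty JSON payload are assumptions made for the example.

import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

/**
 * Minimal sketch (not from the repository) of the JDK-only POST the sink now performs.
 */
public class TimeoutAwarePostSketch {

  /** POSTs a JSON string and returns the HTTP status code. */
  public static int postJson(String collectorUrl, String jsonData, int timeoutMillis)
      throws IOException {
    HttpURLConnection connection =
        (HttpURLConnection) new URL(collectorUrl).openConnection();
    connection.setRequestMethod("POST");
    connection.setRequestProperty("Content-Type", "application/json");
    connection.setConnectTimeout(timeoutMillis); // bound the time spent establishing the connection
    connection.setReadTimeout(timeoutMillis);    // bound the wait for the collector's response
    connection.setDoOutput(true);
    try (OutputStream os = connection.getOutputStream()) {
      os.write(jsonData.getBytes(StandardCharsets.UTF_8));
    }
    return connection.getResponseCode(); // 200 means the metrics were accepted
  }

  public static void main(String[] args) throws IOException {
    // Hypothetical collector endpoint and payload, for illustration only.
    int status = postJson("http://localhost:6188/ws/v1/timeline/metrics",
        "{\"metrics\":[]}", 10 * 1000);
    System.out.println("Collector responded with HTTP " + status);
  }
}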

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-common/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/pom.xml b/ambari-metrics/ambari-metrics-common/pom.xml
index 31c6b2e..1f8702b 100644
--- a/ambari-metrics/ambari-metrics-common/pom.xml
+++ b/ambari-metrics/ambari-metrics-common/pom.xml
@@ -63,11 +63,6 @@
       <version>1.1.1</version>
     </dependency>
     <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <version>3.1</version>
-    </dependency>
-    <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-xc</artifactId>
       <version>1.9.13</version>
@@ -89,8 +84,19 @@
       <version>4.10</version>
     </dependency>
     <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <version>3.2</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index 78d9210..6d7c55f 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.metrics2.sink.timeline;
 
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.methods.PostMethod;
-import org.apache.commons.httpclient.methods.StringRequestEntity;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.codehaus.jackson.map.AnnotationIntrospector;
@@ -28,7 +25,9 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
 
 import java.io.IOException;
-import java.net.ConnectException;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
 
 public abstract class AbstractTimelineMetricsSink {
   public static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix.";
@@ -40,7 +39,6 @@ public abstract class AbstractTimelineMetricsSink {
   public static final int DEFAULT_POST_TIMEOUT_SECONDS = 10;
 
   protected final Log LOG;
-  private HttpClient httpClient = new HttpClient();
 
   protected static ObjectMapper mapper;
 
@@ -54,20 +52,30 @@ public abstract class AbstractTimelineMetricsSink {
 
   public AbstractTimelineMetricsSink() {
     LOG = LogFactory.getLog(this.getClass());
-    httpClient.getParams().setSoTimeout(getTimeoutSeconds() * 1000);
-    httpClient.getParams().setConnectionManagerTimeout(getTimeoutSeconds() * 1000);
   }
 
-  protected void emitMetrics(TimelineMetrics metrics) throws IOException {
+  protected void emitMetrics(TimelineMetrics metrics) {
     String connectUrl = getCollectorUri();
+    int timeout = getTimeoutSeconds() * 1000;
     try {
       String jsonData = mapper.writeValueAsString(metrics);
 
-      StringRequestEntity requestEntity = new StringRequestEntity(jsonData, "application/json", "UTF-8");
+      HttpURLConnection connection =
+        (HttpURLConnection) new URL(connectUrl).openConnection();
 
-      PostMethod postMethod = new PostMethod(connectUrl);
-      postMethod.setRequestEntity(requestEntity);
-      int statusCode = httpClient.executeMethod(postMethod);
+      connection.setRequestMethod("POST");
+      connection.setRequestProperty("Content-Type", "application/json");
+      connection.setConnectTimeout(timeout);
+      connection.setReadTimeout(timeout);
+      connection.setDoOutput(true);
+
+      if (jsonData != null) {
+        try (OutputStream os = connection.getOutputStream()) {
+          os.write(jsonData.getBytes("UTF-8"));
+        }
+      }
+
+      int statusCode = connection.getResponseCode();
 
       if (statusCode != 200) {
         LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
@@ -75,15 +83,11 @@ public abstract class AbstractTimelineMetricsSink {
       } else {
         LOG.debug("Metrics posted to Collector " + connectUrl);
       }
-    } catch (ConnectException e) {
+    } catch (IOException e) {
       throw new UnableToConnectException(e).setConnectUrl(connectUrl);
     }
   }
 
-  public void setHttpClient(HttpClient httpClient) {
-    this.httpClient = httpClient;
-  }
-
   abstract protected String getCollectorUri();
 
   abstract protected int getTimeoutSeconds();

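After this change a concrete sink no longer receives an HttpClient; it only reports where to POST and how long to wait, and emitMetrics() maps any IOException to UnableToConnectException. The hypothetical subclass below is a sketch, assuming (as the hunk above suggests) that getCollectorUri() and getTimeoutSeconds() are the only abstract hooks; ExampleTimelineMetricsSink does not exist in the repository.

import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;

// Hypothetical sink, for illustration only.
public class ExampleTimelineMetricsSink extends AbstractTimelineMetricsSink {

  private final String collectorUri;

  public ExampleTimelineMetricsSink(String collectorUri) {
    this.collectorUri = collectorUri;
  }

  @Override
  protected String getCollectorUri() {
    return collectorUri; // e.g. "http://host:6188/ws/v1/timeline/metrics"
  }

  @Override
  protected int getTimeoutSeconds() {
    return DEFAULT_POST_TIMEOUT_SECONDS; // 10 seconds, defined in the base class
  }

  public void send(TimelineMetrics metrics) {
    // Throws UnableToConnectException if the POST fails with an IOException.
    emitMetrics(metrics);
  }
}
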
http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java
index 4f9b93e..4c1a2cb 100644
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java
+++ b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java
@@ -18,12 +18,10 @@
 package org.apache.hadoop.metrics2.sink.timeline.cache;
 
 import java.io.IOException;
-import java.net.ConnectException;
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
 
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpMethod;
 import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.metrics2.sink.timeline.UnableToConnectException;
@@ -31,27 +29,40 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
-@RunWith(MockitoJUnitRunner.class)
+import static org.easymock.EasyMock.expect;
+import static org.powermock.api.easymock.PowerMock.createNiceMock;
+import static org.powermock.api.easymock.PowerMock.expectNew;
+import static org.powermock.api.easymock.PowerMock.replayAll;
+
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({AbstractTimelineMetricsSink.class, URL.class,
+  HttpURLConnection.class})
 public class HandleConnectExceptionTest {
   private static final String COLLECTOR_URL = "collector";
-  @Mock private HttpClient client;
   private TestTimelineMetricsSink sink;
   
-  @Before public void init(){
+  @Before
+  public void init(){
     sink = new TestTimelineMetricsSink();
-    sink.setHttpClient(client);
-    
+    OutputStream os = createNiceMock(OutputStream.class);
+    HttpURLConnection connection = createNiceMock(HttpURLConnection.class);
+    URL url = createNiceMock(URL.class);
+
     try {
-      Mockito.when(client.executeMethod(Mockito.<HttpMethod>any())).thenThrow(new ConnectException());
-    } catch (IOException e) {
+      expectNew(URL.class, "collector").andReturn(url);
+      expect(url.openConnection()).andReturn(connection).once();
+      expect(connection.getOutputStream()).andReturn(os).once();
+      expect(connection.getResponseCode()).andThrow(new IOException());
+
+      replayAll();
+    } catch (Exception e) {
       //no-op
     }
-  } 
-  
+  }
+
   @Test
   public void handleTest(){
     try{
@@ -63,6 +74,7 @@ public class HandleConnectExceptionTest {
       Assert.fail(e.getMessage());
     }
   }
+
   class TestTimelineMetricsSink extends AbstractTimelineMetricsSink{
     @Override
     protected String getCollectorUri() {
@@ -75,7 +87,7 @@ public class HandleConnectExceptionTest {
     }
 
     @Override
-    public void emitMetrics(TimelineMetrics metrics) throws IOException {
+    public void emitMetrics(TimelineMetrics metrics) {
       super.emitMetrics(metrics);
     }
   }

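Since the injectable HttpClient is gone, the test above provokes the failure by mocking the construction of java.net.URL itself with PowerMock's expectNew(). The condensed example below shows the same pattern against a small hypothetical Probe class; it assumes only the PowerMock/EasyMock calls already used in the committed test and is a sketch, not repository code.

import static org.easymock.EasyMock.expect;
import static org.powermock.api.easymock.PowerMock.createNiceMock;
import static org.powermock.api.easymock.PowerMock.expectNew;
import static org.powermock.api.easymock.PowerMock.replayAll;
import static org.powermock.api.easymock.PowerMock.verifyAll;

import java.net.HttpURLConnection;
import java.net.URL;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

// Hypothetical test illustrating the pattern used in HandleConnectExceptionTest:
// preparing the class that calls "new URL(...)" lets PowerMock intercept the
// constructor, so the HTTP layer can be faked without an injectable client.
@RunWith(PowerMockRunner.class)
@PrepareForTest({UrlConstructionSketchTest.Probe.class, URL.class, HttpURLConnection.class})
public class UrlConstructionSketchTest {

  // Hypothetical class under test: it builds its own connection internally,
  // just like AbstractTimelineMetricsSink.emitMetrics() now does.
  static class Probe {
    int ping(String address) throws Exception {
      HttpURLConnection c = (HttpURLConnection) new URL(address).openConnection();
      return c.getResponseCode();
    }
  }

  @Test
  public void newUrlIsIntercepted() throws Exception {
    HttpURLConnection connection = createNiceMock(HttpURLConnection.class);
    URL url = createNiceMock(URL.class);

    expectNew(URL.class, "http://example.invalid").andReturn(url);
    expect(url.openConnection()).andReturn(connection);
    expect(connection.getResponseCode()).andReturn(200);
    replayAll();

    Assert.assertEquals(200, new Probe().ping("http://example.invalid"));
    verifyAll();
  }
}
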
http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java b/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
index eca742d..bd4ae6a 100644
--- a/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
+++ b/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
@@ -30,7 +30,6 @@ import static org.powermock.api.easymock.PowerMock.verifyAll;
 import java.net.InetAddress;
 import java.util.Collections;
 
-import org.apache.commons.httpclient.HttpClient;
 import org.apache.flume.Context;
 import org.apache.flume.instrumentation.util.JMXPollUtil;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
@@ -87,14 +86,12 @@ public class FlumeTimelineMetricsSinkTest {
     FlumeTimelineMetricsSink flumeTimelineMetricsSink = new FlumeTimelineMetricsSink();
     TimelineMetricsCache timelineMetricsCache = getTimelineMetricsCache(flumeTimelineMetricsSink);
     flumeTimelineMetricsSink.setPollFrequency(1);
-    HttpClient httpClient = EasyMock.createNiceMock(HttpClient.class);
-    flumeTimelineMetricsSink.setHttpClient(httpClient);
     mockStatic(JMXPollUtil.class);
     EasyMock.expect(JMXPollUtil.getAllMBeans()).andReturn(
         Collections.singletonMap("component1", Collections.singletonMap("key1", "42"))).once();
     flumeTimelineMetricsSink.start();
     flumeTimelineMetricsSink.stop();
-    replay(JMXPollUtil.class, timelineMetricsCache, httpClient);
+    replay(JMXPollUtil.class, timelineMetricsCache);
     flumeTimelineMetricsSink.start();
     Thread.sleep(5);
     flumeTimelineMetricsSink.stop();

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml b/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
index ae821bf..16bfa15 100644
--- a/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
+++ b/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
@@ -124,12 +124,6 @@ limitations under the License.
       <scope>compile</scope>
     </dependency>
     <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <version>3.1</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
       <groupId>commons-codec</groupId>
       <artifactId>commons-codec</artifactId>
       <version>1.8</version>
@@ -175,6 +169,16 @@ limitations under the License.
       <version>3.2</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
index 2d171d9..f23dc42 100644
--- a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.metrics2.sink.timeline;
 
-import java.io.IOException;
 import java.net.SocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@@ -35,7 +34,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.AbstractMetric;
-import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsSink;
 import org.apache.hadoop.metrics2.MetricsTag;
@@ -212,8 +210,6 @@ public class HadoopTimelineMetricsSink extends AbstractTimelineMetricsSink imple
       }
     } catch (UnableToConnectException uce) {
       LOG.warn("Unable to send metrics to collector by address:" + uce.getConnectUrl());
-    } catch (IOException io) {
-      throw new MetricsException("Failed to putMetrics", io);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java b/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
index dddbbd0..a69b7c7 100644
--- a/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
+++ b/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
@@ -31,24 +31,29 @@ import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
 
+import java.io.OutputStream;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.methods.PostMethod;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.easymock.EasyMock;
 import org.easymock.IAnswer;
 import org.junit.Assert;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
+@RunWith(PowerMockRunner.class)
 public class HadoopTimelineMetricsSinkTest {
 
   @Test
+  @PrepareForTest({URL.class, OutputStream.class})
   public void testPutMetrics() throws Exception {
     HadoopTimelineMetricsSink sink = new HadoopTimelineMetricsSink();
 
@@ -82,11 +87,6 @@ public class HadoopTimelineMetricsSinkTest {
       }
     }).once();
 
-
-    HttpClient httpClient = createNiceMock(HttpClient.class);
-
-    expect(httpClient.executeMethod(anyObject(PostMethod.class))).andReturn(200).once(); //metrics send only once due to caching
-
     AbstractMetric metric = createNiceMock(AbstractMetric.class);
     expect(metric.name()).andReturn("metricName").anyTimes();
     expect(metric.value()).andReturn(9.5687).anyTimes();
@@ -105,9 +105,8 @@ public class HadoopTimelineMetricsSinkTest {
     expect(record.metrics()).andReturn(Arrays.asList(metric)).anyTimes();
 
 
-    replay(conf, httpClient, record, metric);
+    replay(conf, record, metric);
 
-    sink.setHttpClient(httpClient);
     sink.init(conf);
 
     sink.putMetrics(record);
@@ -116,7 +115,7 @@ public class HadoopTimelineMetricsSinkTest {
 
     sink.putMetrics(record);
 
-    verify(conf, httpClient, record, metric);
+    verify(conf, record, metric);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-kafka-sink/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-kafka-sink/pom.xml b/ambari-metrics/ambari-metrics-kafka-sink/pom.xml
index 15230aa..755a94b 100644
--- a/ambari-metrics/ambari-metrics-kafka-sink/pom.xml
+++ b/ambari-metrics/ambari-metrics-kafka-sink/pom.xml
@@ -43,7 +43,7 @@ limitations under the License.
               <goal>copy-dependencies</goal>
             </goals>
             <configuration>
-              <includeArtifactIds>commons-codec,commons-collections,commons-httpclient,commons-lang,commons-logging,jackson-core-asl,jackson-mapper-asl,jackson-xc</includeArtifactIds>
+              <includeArtifactIds>commons-codec,commons-collections,commons-lang,commons-logging,jackson-core-asl,jackson-mapper-asl,jackson-xc</includeArtifactIds>
               <outputDirectory>${project.build.directory}/lib</outputDirectory>
             </configuration>
           </execution>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java b/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
index 1d1b6df..ff2db1d 100644
--- a/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
+++ b/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
@@ -43,15 +43,11 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
 
-import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -255,8 +251,6 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
         timelineMetrics.setMetrics(metricsList);
         try {
           emitMetrics(timelineMetrics);
-        } catch (IOException e) {
-          LOG.error("Unexpected error", e);
         } catch (Throwable t) {
           LOG.error("Exception emitting metrics", t);
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java b/ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java
index 70f4850..6f2fc27 100644
--- a/ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java
+++ b/ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java
@@ -28,7 +28,6 @@ import com.yammer.metrics.core.MetricsRegistry;
 import com.yammer.metrics.core.Timer;
 import junit.framework.Assert;
 import kafka.utils.VerifiableProperties;
-import org.apache.commons.httpclient.HttpClient;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
 import org.easymock.EasyMock;
@@ -38,6 +37,9 @@ import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.OutputStream;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
@@ -48,7 +50,7 @@ import static org.powermock.api.easymock.PowerMock.replay;
 import static org.powermock.api.easymock.PowerMock.verifyAll;
 
 @RunWith(PowerMockRunner.class)
-@PrepareForTest({ Metrics.class, HttpClient.class,
+@PrepareForTest({ Metrics.class, URL.class, OutputStream.class,
   KafkaTimelineMetricsReporter.TimelineScheduledReporter.class })
 @PowerMockIgnore({"javax.management.*", "org.apache.log4j.*", "org.slf4j.*"})
 public class KafkaTimelineMetricsReporterTest {
@@ -90,9 +92,7 @@ public class KafkaTimelineMetricsReporterTest {
     EasyMock.expect(Metrics.defaultRegistry()).andReturn(registry).times(2);
     TimelineMetricsCache timelineMetricsCache = getTimelineMetricsCache(kafkaTimelineMetricsReporter);
     kafkaTimelineMetricsReporter.setMetricsCache(timelineMetricsCache);
-    HttpClient httpClient = EasyMock.createNiceMock(HttpClient.class);
-    kafkaTimelineMetricsReporter.setHttpClient(httpClient);
-    replay(Metrics.class, httpClient, timelineMetricsCache);
+    replay(Metrics.class, timelineMetricsCache);
     kafkaTimelineMetricsReporter.init(props);
     kafkaTimelineMetricsReporter.stopReporter();
     verifyAll();
@@ -104,10 +104,8 @@ public class KafkaTimelineMetricsReporterTest {
     EasyMock.expect(Metrics.defaultRegistry()).andReturn(registry).times(2);
     TimelineMetricsCache timelineMetricsCache = getTimelineMetricsCache(kafkaTimelineMetricsReporter);
     kafkaTimelineMetricsReporter.setMetricsCache(timelineMetricsCache);
-    HttpClient httpClient = EasyMock.createNiceMock(HttpClient.class);
-    kafkaTimelineMetricsReporter.setHttpClient(httpClient);
 
-    replay(Metrics.class, httpClient, timelineMetricsCache);
+    replay(Metrics.class, timelineMetricsCache);
     kafkaTimelineMetricsReporter.init(props);
 
     Assert.assertTrue(kafkaTimelineMetricsReporter.isExcludedMetric("a.b.c"));

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-storm-sink/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-storm-sink/pom.xml b/ambari-metrics/ambari-metrics-storm-sink/pom.xml
index 7446167..de97c6e 100644
--- a/ambari-metrics/ambari-metrics-storm-sink/pom.xml
+++ b/ambari-metrics/ambari-metrics-storm-sink/pom.xml
@@ -102,7 +102,6 @@ limitations under the License.
               <include>org.codehaus.jackson:jackson-core-asl</include>
               <include>org.codehaus.jackson:jackson-xc</include>
               <include>org.apache.hadoop:hadoop-annotations</include>
-              <include>commons-httpclient:commons-httpclient</include>
               <include>commons-logging:commons-logging</include>
               <include>org.apache.commons:commons-lang3</include>
               <include>commons-codec:commons-codec</include>
@@ -114,10 +113,6 @@ limitations under the License.
               <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.commons.logging</shadedPattern>
             </relocation>
             <relocation>
-              <pattern>org.apache.commons.httpclient</pattern>
-              <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.commons.httpclient</shadedPattern>
-            </relocation>
-            <relocation>
               <pattern>org.apache.hadoop.classification</pattern>
               <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.hadoop.classification</shadedPattern>
             </relocation>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java
index 0edc342..4208287 100644
--- a/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.metrics2.sink.timeline.UnableToConnectException;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
 import org.apache.hadoop.metrics2.sink.timeline.configuration.Configuration;
 
-import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@@ -107,8 +106,6 @@ public class StormTimelineMetricsSink extends AbstractTimelineMetricsSink implem
         emitMetrics(timelineMetrics);
       } catch (UnableToConnectException uce) {
         LOG.warn("Unable to send metrics to collector by address:" + uce.getConnectUrl());
-      } catch (IOException e) {
-        LOG.error("Unexpected error", e);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e46494/ambari-metrics/ambari-metrics-storm-sink/src/test/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSinkTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-storm-sink/src/test/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSinkTest.java b/ambari-metrics/ambari-metrics-storm-sink/src/test/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSinkTest.java
index a0600e5..3f139da 100644
--- a/ambari-metrics/ambari-metrics-storm-sink/src/test/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSinkTest.java
+++ b/ambari-metrics/ambari-metrics-storm-sink/src/test/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSinkTest.java
@@ -29,8 +29,6 @@ import java.io.IOException;
 import java.net.SocketAddress;
 import java.util.Collections;
 
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.methods.PostMethod;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
 import org.junit.Test;
@@ -43,13 +41,11 @@ public class StormTimelineMetricsSinkTest {
     StormTimelineMetricsSink stormTimelineMetricsSink = new StormTimelineMetricsSink();
     TimelineMetricsCache timelineMetricsCache = createNiceMock(TimelineMetricsCache.class);
     stormTimelineMetricsSink.setMetricsCache(timelineMetricsCache);
-    HttpClient httpClient = createNiceMock(HttpClient.class);
-    stormTimelineMetricsSink.setHttpClient(httpClient);
-    replay(timelineMetricsCache, httpClient);
+    replay(timelineMetricsCache);
     stormTimelineMetricsSink.handleDataPoints(
         new IMetricsConsumer.TaskInfo("localhost", 1234, "testComponent", 42, 20000L, 60),
         Collections.singleton(new IMetricsConsumer.DataPoint("key1", "value1")));
-    verify(timelineMetricsCache, httpClient);
+    verify(timelineMetricsCache);
   }
 
   @Test
@@ -61,13 +57,10 @@ public class StormTimelineMetricsSinkTest {
     timelineMetricsCache.putTimelineMetric(anyObject(TimelineMetric.class));
     expectLastCall().once();
     stormTimelineMetricsSink.setMetricsCache(timelineMetricsCache);
-    HttpClient httpClient = createNiceMock(HttpClient.class);
-    stormTimelineMetricsSink.setHttpClient(httpClient);
-    expect(httpClient.executeMethod(anyObject(PostMethod.class))).andReturn(200).once();
-    replay(timelineMetricsCache, httpClient);
+    replay(timelineMetricsCache);
     stormTimelineMetricsSink.handleDataPoints(
         new IMetricsConsumer.TaskInfo("localhost", 1234, "testComponent", 42, 20000L, 60),
         Collections.singleton(new IMetricsConsumer.DataPoint("key1", 42)));
-    verify(timelineMetricsCache, httpClient);
+    verify(timelineMetricsCache);
   }
 }

