eagle-issues mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Commented] (EAGLE-845) JMX Dashboard
Date Wed, 04 Jan 2017 07:52:58 GMT

    [ https://issues.apache.org/jira/browse/EAGLE-845?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15797493#comment-15797493
] 

ASF GitHub Bot commented on EAGLE-845:
--------------------------------------

Github user zombieJ commented on a diff in the pull request:

    https://github.com/apache/eagle/pull/753#discussion_r94540400
  
    --- Diff: eagle-hadoop-metric/src/main/webapp/app/apps/hadoop_metric/ctrls/overview.js
---
    @@ -21,52 +21,193 @@
     	 * `register` without params will load the module which using require
     	 */
     	register(function (hadoopMetricApp) {
    -		hadoopMetricApp.controller("overviewCtrl", function ($scope, PageConfig) {
    +		hadoopMetricApp.controller("overviewCtrl", function ($q, $wrapState, $scope, PageConfig,
METRIC, Time) {
    +			var cache = {};
    +			var mapRes = common.map2;
    +			$scope.site = $wrapState.param.siteId;
    +			var activeMasterInfo = METRIC.hbaseActiveMaster($scope.site);
    +			var metricMap = common.map1;
    +			metricMap.put("nonheap", "hadoop.memory.nonheapmemoryusage.used");
    +			metricMap.put("heap", "hadoop.memory.heapmemoryusage.used");
    +			metricMap.put("averageload", "hadoop.hbase.master.server.averageload");
    +			metricMap.put("ritcount", "hadoop.hbase.master.assignmentmanger.ritcount");
    +			metricMap.put("ritcountoverthreshold", "hadoop.hbase.master.assignmentmanger.ritcountoverthreshold");
    +			metricMap.put("AssignNumOps", "hadoop.hbase.master.assignmentmanger.assign_num_ops");
    +			metricMap.put("AssignMin", "hadoop.hbase.master.assignmentmanger.assign_min");
    +			metricMap.put("AssignMax", "hadoop.hbase.master.assignmentmanger.assign_max");
    +			metricMap.put("AssignPercentile75th", "hadoop.hbase.master.assignmentmanger.assign_75th_percentile");
    +			metricMap.put("AssignPercentile95th", "hadoop.hbase.master.assignmentmanger.assign_95th_percentile");
    +			metricMap.put("AssignPercentile99th", "hadoop.hbase.master.assignmentmanger.assign_99th_percentile");
    +			metricMap.put("BulkAssignNum_ops", "hadoop.hbase.master.assignmentmanger.bulkassign_num_ops");
    +			metricMap.put("BulkAssignMin", "hadoop.hbase.master.assignmentmanger.bulkassign_min");
    +			metricMap.put("BulkAssignMax", "hadoop.hbase.master.assignmentmanger.bulkassign_max");
    +			metricMap.put("BulkAssignPercentile75th", "hadoop.hbase.master.assignmentmanger.bulkassign_75th_percentile");
    +			metricMap.put("BulkAssignPercentile95th", "hadoop.hbase.master.assignmentmanger.bulkassign_95th_percentile");
    +			metricMap.put("BulkAssignPercentile99th", "hadoop.hbase.master.assignmentmanger.bulkassign_99th_percentile");
    +			metricMap.put("BalancerClusterNum_ops", "hadoop.hbase.master.balancer.balancercluster_num_ops");
    +			metricMap.put("BalancerClusterMin", "hadoop.hbase.master.balancer.balancercluster_min");
    +			metricMap.put("BalancerClusterMax", "hadoop.hbase.master.balancer.balancercluster_max");
    +			metricMap.put("BalancerClusterPercentile75th", "hadoop.hbase.master.balancer.balancercluster_75th_percentile");
    +			metricMap.put("BalancerClusterPercentile95th", "hadoop.hbase.master.balancer.balancercluster_95th_percentile");
    +			metricMap.put("BalancerClusterPercentile99th", "hadoop.hbase.master.balancer.balancercluster_99th_percentile");
    +			metricMap.put("HlogSplitTimeMin", "hadoop.hbase.master.filesystem.hlogsplittime_min");
    +			metricMap.put("HlogSplitTimeMax", "hadoop.hbase.master.filesystem.hlogsplittime_max");
    +			metricMap.put("HlogSplitTimePercentile75th", "hadoop.hbase.master.filesystem.hlogsplittime_75th_percentile");
    +			metricMap.put("HlogSplitTimePercentile95th", "hadoop.hbase.master.filesystem.hlogsplittime_95th_percentile");
    +			metricMap.put("HlogSplitTimePercentile99th", "hadoop.hbase.master.filesystem.hlogsplittime_99th_percentile");
    +			metricMap.put("HlogSplitSizeMin", "hadoop.hbase.master.filesystem.hlogsplitsize_min");
    +			metricMap.put("HlogSplitSizeMax", "hadoop.hbase.master.filesystem.hlogsplitsize_max");
    +			metricMap.put("MetaHlogSplitTimeMin", "hadoop.hbase.master.filesystem.metahlogsplittime_min");
    +			metricMap.put("MetaHlogSplitTimeMax", "hadoop.hbase.master.filesystem.metahlogsplittime_max");
    +			metricMap.put("MetaHlogSplitTimePercentile75th", "hadoop.hbase.master.filesystem.metahlogsplittime_75th_percentile");
    +			metricMap.put("MetaHlogSplitTimePercentile95th", "hadoop.hbase.master.filesystem.metahlogsplittime_95th_percentile");
    +			metricMap.put("MetaHlogSplitTimePercentile99th", "hadoop.hbase.master.filesystem.metahlogsplittime_99th_percentile");
    +			metricMap.put("MetaHlogSplitSizeMin", "hadoop.hbase.master.filesystem.metahlogsplitsize_min");
    +			metricMap.put("MetaHlogSplitSizeMax", "hadoop.hbase.master.filesystem.metahlogsplitsize_max");
    +
     			PageConfig.title = 'Overview';
    +			var storageOption = {
    +				animation: false,
    +				tooltip: {
    +					formatter: function (points) {
    +						return points[0].name + "<br/>" +
    +							$.map(points, function (point) {
    +								return '<span style="display:inline-block;margin-right:5px;border-radius:10px;width:9px;height:9px;background-color:'
+ point.color + '"></span> ' +
    +									point.seriesName + ": " +
    +									common.number.abbr(point.value, true, 0);
    +							}).reverse().join("<br/>");
    +					}
    +				},
    +				yAxis: [{
    +					axisLabel: {
    +						formatter: function (value) {
    +							return common.number.sizeFormat(value, 0);
    +						}
    +					}
    +				}]
    +			};
    +			$scope.metricList = {};
    +
    +			function generateHbaseMetric(name, param) {
    +				var startTime = Time.startTime();
    +				var endTime = Time.endTime();
    +				var interval = Time.diffInterval(startTime, endTime);
    +				var intervalMin = interval / 1000 / 60;
    +				var trendStartTime = Time.align(startTime, interval);
    +				var trendEndTime = Time.align(endTime, interval);
     
    -			$scope.commonOption = {};
    +				$scope.site = $wrapState.param.siteId;
     
    -			// Mock series data
    -			function mockMetric(name, option, count) {
    -				count = count || 1;
    -				var now = +new Date();
    +				var metrics = cache[name] = cache[name] || $q.all([activeMasterInfo._promise]).then(function
(res) {
    +						var hostname = cache[hostname] = cache[hostname] || res[0][0].tags.hostname;
    +						$scope.defaultHostname = $wrapState.param.hostname || hostname;
     
    +						var jobCond = {
    +							site: $scope.site,
    +							component: "hbasemaster",
    +							host: $scope.defaultHostname
    +						};
    +						return METRIC.aggMetricsToEntities(METRIC.hbaseMetricsAggregation(jobCond, name,
["site"], "avg(value)", intervalMin, trendStartTime, trendEndTime), param)
    +							._promise.then(function (list) {
    +								var metricFlag = $.map(list, function (metrics) {
    +									return metrics[0].flag;
    +								});
    +								return [metricFlag, list];
    +							});
    +
    +					});
    +				return metrics;
    +			}
    +
    +			function mergeMetricToOneSeries(metricTitle, metrics, legendName, dataOption, option)
{
     				var series = [];
    -				for (var i = 0 ; i < count ; i += 1) {
    -					var data = [];
     
    -					for(var j = 0 ; j < 30 ; j += 1) {
    -						data.push({x: now + j * 1000 * 60, y: Math.random() * 100});
    -					}
    +				$.each(metrics, function (i, metricMap) {
    +					series.push(METRIC.metricsToSeries(legendName[i], metricMap[0], option));
    +				});
     
    -					series.push($.extend({
    -						name: name + '_' + i,
    -						type: 'line',
    -						data: data,
    -						showSymbol: false,
    -					}, option));
    -				}
     
     				return {
    -					title: name,
    -					series: series
    +					title: metricTitle,
    +					series: series,
    +					option: dataOption || {}
     				};
     			}
     
    -			$scope.metricList = [
    -				mockMetric('name1', {}),
    -				mockMetric('name2', {smooth:true}),
    -				mockMetric('name3', {areaStyle: {normal: {}}}),
    -				mockMetric('name4', {type: 'bar'}),
    -				mockMetric('name1', {}, 2),
    -				mockMetric('name2', {smooth:true}, 2),
    -				mockMetric('name3', {areaStyle: {normal: {}}, stack: 'one'}, 2),
    -				mockMetric('name4', {type: 'bar', stack: 'one'}, 2),
    -				mockMetric('name1', {}, 3),
    -				mockMetric('name2', {smooth:true}, 3),
    -				mockMetric('name3', {areaStyle: {normal: {}}, stack: 'one'}, 3),
    -				mockMetric('name4', {type: 'bar', stack: 'one'}, 3),
    -			];
    +			// TODO: Optimize the chart count
    +			// TODO: ECharts dynamic refresh series bug: https://github.com/ecomfe/echarts/issues/4033
    +
    +
    +			$scope.refresh = function () {
    +				var hbaseservers = METRIC.hbasehostStatus({site: $scope.site});
    +				var metricspromies = [];
    +				var summaryPromises = [];
    +				for (var i = 0; i < metricMap.keys().length; i += 1) {
    +					var key = [];
    +					key = metricMap.keys()[i];
    --- End diff --
    
    Use `Object.keys` here — compute the key list once before the loop instead of calling `metricMap.keys()` on every iteration and in the loop condition.


> JMX Dashboard
> -------------
>
>                 Key: EAGLE-845
>                 URL: https://issues.apache.org/jira/browse/EAGLE-845
>             Project: Eagle
>          Issue Type: New Feature
>          Components: Hadoop JMX Monitor, Web UI
>            Reporter: JiJun Tang
>            Assignee: Lingang Deng
>




--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

Mime
View raw message