ambari-commits mailing list archives

From d...@apache.org
Subject ambari git commit: AMBARI-11953 /var/lib/ambari-metrics-collector/hbase-tmp/ is not cleaned (dsen)
Date Tue, 16 Jun 2015 22:31:42 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 8cedc3bbc -> b7d74258f


AMBARI-11953 /var/lib/ambari-metrics-collector/hbase-tmp/ is not cleaned (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b7d74258
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b7d74258
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b7d74258

Branch: refs/heads/branch-2.1
Commit: b7d74258f3bd2fe7ab73f2c03d774cfc11be76ed
Parents: 8cedc3b
Author: Dmytro Sen <dsen@apache.org>
Authored: Wed Jun 17 01:29:48 2015 +0300
Committer: Dmytro Sen <dsen@apache.org>
Committed: Wed Jun 17 01:30:37 2015 +0300

----------------------------------------------------------------------
 .../0.1.0/package/scripts/ams_service.py        |   4 +
 .../0.1.0/package/scripts/params.py             |   3 +-
 .../0.1.0/package/scripts/params_linux.py       |   2 +
 .../AMBARI_METRICS/test_metrics_collector.py    | 267 +++++++++++++++++++
 .../python/stacks/2.0.6/FLUME/test_flume.py     |   6 +-
 .../python/stacks/2.0.6/configs/default.json    | 148 +++++++++-
 6 files changed, 427 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
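In short: on Metrics Collector start, when hbase.tmp.dir resolves to a local path (not an hdfs:// URI), stale *.tmp files in that directory are now removed through the Ambari sudo wrapper; hbase.tmp.dir is first passed through substitute_vars so any ${...} placeholders are resolved. A new unit test covers the collector start path, the Flume test is updated for the timeline metrics sink command line, and default.json gains the AMS configuration sections that test needs.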


http://git-wip-us.apache.org/repos/asf/ambari/blob/b7d74258/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
index d348c50..2887e7c 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
@@ -50,6 +50,10 @@ def ams_service(name, action):
       cmd = format("{cmd} --distributed")
 
     if action == 'start':
+      if not params.hbase_tmp_dir.startswith('hdfs'):
+        Execute(format('{sudo} rm -f {hbase_tmp_dir}/*.tmp')
+        )
+
       daemon_cmd = format("{cmd} start")
       Execute(daemon_cmd,
               user=params.ams_user
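
For readers outside Ambari's resource_management DSL, a minimal standalone sketch of the same start-time cleanup (plain glob/os instead of Execute/format, with a hypothetical clean_hbase_tmp helper) is:

    import glob
    import os

    def clean_hbase_tmp(hbase_tmp_dir):
        # Skip cleanup when hbase.tmp.dir points at HDFS (mirrors the
        # startswith('hdfs') guard added in ams_service.py above).
        if hbase_tmp_dir.startswith('hdfs'):
            return
        # Remove stale HBase *.tmp files left behind by a previous collector run.
        for tmp_file in glob.glob(os.path.join(hbase_tmp_dir, '*.tmp')):
            os.remove(tmp_file)

    # e.g. clean_hbase_tmp('/var/lib/ambari-metrics-collector/hbase-tmp')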

http://git-wip-us.apache.org/repos/asf/ambari/blob/b7d74258/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 4c78f77..5e4da80 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -116,7 +116,8 @@ zookeeper_quorum_hosts = ','.join(ams_collector_hosts) if is_hbase_distributed e
 
 ams_checkpoint_dir = config['configurations']['ams-site']['timeline.metrics.aggregator.checkpoint.dir']
 hbase_pid_dir = status_params.hbase_pid_dir
-hbase_tmp_dir = config['configurations']['ams-hbase-site']['hbase.tmp.dir']
+_hbase_tmp_dir = config['configurations']['ams-hbase-site']['hbase.tmp.dir']
+hbase_tmp_dir = substitute_vars(_hbase_tmp_dir, config['configurations']['ams-hbase-site'])
 # TODO UPGRADE default, update site during upgrade
 _local_dir_conf = default('/configurations/ams-hbase-site/hbase.local.dir', "${hbase.tmp.dir}/local")
 local_dir = substitute_vars(_local_dir_conf, config['configurations']['ams-hbase-site'])
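
substitute_vars resolves ${property} placeholders in hbase.tmp.dir against the same ams-hbase-site configuration, just as hbase.local.dir (default ${hbase.tmp.dir}/local) is resolved two lines below, so the cleanup command in ams_service.py always sees a concrete path. A toy stand-in (hypothetical resolve_vars, not the real library function) illustrates the idea:

    import re

    def resolve_vars(raw_value, site_config):
        # Repeatedly replace ${key} tokens with values from the same config dict
        # until nothing changes; unknown keys are left as-is.
        pattern = re.compile(r'\$\{([^}]+)\}')
        value = raw_value
        while True:
            resolved = pattern.sub(lambda m: site_config.get(m.group(1), m.group(0)), value)
            if resolved == value:
                return resolved
            value = resolved

    site = {'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase-tmp'}
    print(resolve_vars('${hbase.tmp.dir}/local', site))
    # /var/lib/ambari-metrics-collector/hbase-tmp/local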

http://git-wip-us.apache.org/repos/asf/ambari/blob/b7d74258/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
index 6443fe8..838e987 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
@@ -21,6 +21,7 @@ limitations under the License.
 from resource_management import *
 from resource_management.libraries.functions import conf_select
 from ambari_commons import OSCheck
+from ambari_commons.constants import AMBARI_SUDO_BINARY
 
 config = Script.get_config()
 
@@ -46,3 +47,4 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hbase_conf_dir = "/etc/ams-hbase/conf"
 
 limits_conf_dir = "/etc/security/limits.d"
+sudo = AMBARI_SUDO_BINARY
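
(The sudo alias introduced here points at Ambari's AMBARI_SUDO_BINARY wrapper, ambari-sudo.sh, used by the {sudo} rm -f cleanup command added in ams_service.py.)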

http://git-wip-us.apache.org/repos/asf/ambari/blob/b7d74258/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
new file mode 100644
index 0000000..e383f9c
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
@@ -0,0 +1,267 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
+class TestOozieClient(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "AMBARI_METRICS/0.1.0/package"
+  STACK_VERSION = "2.0.6"
+
+  def test_start_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/metrics_collector.py",
+                       classname = "AmsCollector",
+                       command = "start",
+                       config_file="default.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.maxDiff=None
+    self.assert_hbase_configure('master')
+    self.assert_hbase_configure('regionserver')
+    self.assert_ams('collector')
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf start zookeeper',
+                              not_if = 'ls /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid` >/dev/null 2>&1',
+                              user = 'ams'
+    )
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf start master',
+                              not_if = 'ls /var/run/ambari-metrics-collector//hbase-ams-master.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-master.pid` >/dev/null 2>&1',
+                              user = 'ams'
+    )
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf start regionserver',
+                              not_if = 'ls /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid` >/dev/null 2>&1',
+                              user = 'ams'
+    )
+    self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-collector --config /etc/ambari-metrics-collector/conf --distributed start',
+                              user = 'ams'
+    )
+    self.assertNoMoreResources()
+
+  def assert_ams(self, name=None):
+    self.assertResourceCalled('Directory', '/etc/ambari-metrics-collector/conf',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              recursive = True
+    )
+    self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/checkpoint',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              cd_access = 'a',
+                              recursive = True
+    )
+    self.assertResourceCalled('XmlConfig', 'ams-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ambari-metrics-collector/conf',
+                              configurations = self.getConfig()['configurations']['ams-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
+    )
+    merged_ams_hbase_site = {}
+    merged_ams_hbase_site.update(self.getConfig()['configurations']['ams-hbase-site'])
+    merged_ams_hbase_site['phoenix.query.maxGlobalMemoryPercentage'] = '25'
+    merged_ams_hbase_site['phoenix.spool.directory'] = '/tmp'
+
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ambari-metrics-collector/conf',
+                              configurations = merged_ams_hbase_site,
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
+    )
+    self.assertResourceCalled('File', '/etc/ambari-metrics-collector/conf/log4j.properties',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              content = "\n",
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/etc/ambari-metrics-collector/conf/ams-env.sh',
+                              owner = 'ams',
+                              content = InlineTemplate(self.getConfig()['configurations']['ams-env']['content'])
+    )
+    self.assertResourceCalled('Directory', '/var/log/ambari-metrics-collector',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              cd_access = 'a',
+                              recursive = True
+    )
+    self.assertResourceCalled('Directory', '/var/run/ambari-metrics-collector',
+                              owner = 'ams',
+                              cd_access = 'a',
+                              group = 'hadoop',
+                              recursive = True
+    )
+    self.assertResourceCalled('File', '/usr/lib/ams-hbase/bin/hadoop',
+                              owner = 'ams',
+                              mode=0755
+    )
+    self.assertResourceCalled('Directory', '/etc/security/limits.d',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/ams.conf',
+                              owner='root',
+                              group='root',
+                              mode=0644,
+                              content=Template("ams.conf.j2")
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              mode=0644,
+                              conf_dir = '/etc/ambari-metrics-collector/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              mode=0644,
+                              conf_dir = '/etc/ams-hbase/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              mode=0644,
+                              conf_dir = '/etc/ambari-metrics-collector/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              mode=0644,
+                              conf_dir = '/etc/ams-hbase/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+
+  def assert_hbase_configure(self, name=None):
+    self.assertResourceCalled('Directory', '/etc/ams-hbase/conf',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              recursive = True
+    )
+    self.assertResourceCalled('Directory', 'hdfs://localhost:8020/apps/hbase/data/tmp',
+                              owner = 'ams',
+                              cd_access = 'a',
+                              recursive = True
+    )
+    self.assertResourceCalled('Directory', 'hdfs://localhost:8020/apps/hbase/data/tmp/local/jars',
+                              owner = 'ams',
+                              cd_access = 'a',
+                              group = 'hadoop',
+                              mode = 0775,
+                              recursive = True
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ams-hbase/conf',
+                              configurations = self.getConfig()['configurations']['ams-hbase-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
+                              )
+    self.assertResourceCalled('Directory', 'hdfs://localhost:8020/apps/hbase/data/tmp/phoenix-spool',
+                              owner = 'ams',
+                              cd_access = 'a',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ams-hbase/conf',
+                              configurations = self.getConfig()['configurations']['ams-hbase-policy'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
+    )
+    self.assertResourceCalled('File', '/etc/ams-hbase/conf/hbase-env.sh',
+                              owner = 'ams',
+                              content = InlineTemplate(self.getConfig()['configurations']['ams-hbase-env']['content'])
+                              )
+    self.assertResourceCalled('File', '/etc/ams-hbase/conf/hadoop-metrics2-hbase.properties',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hbase.properties.j2')
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/ams-hbase/conf/regionservers',
+                              owner = 'ams',
+                              template_tag = None,
+                              )
+    if name == 'master':
+      self.assertResourceCalled('HdfsResource', 'hdfs://localhost:8020/apps/hbase/data',
+                                security_enabled = False,
+                                hadoop_bin_dir = '/usr/bin',
+                                keytab = UnknownConfigurationMock(),
+                                kinit_path_local = '/usr/bin/kinit',
+                                user = 'hdfs',
+                                owner = 'ams',
+                                mode = 0775,
+                                hadoop_conf_dir = '/etc/hadoop/conf',
+                                type = 'directory',
+                                action = ['create_on_execute'],
+                                hdfs_site=self.getConfig()['configurations']['hdfs-site'],
+                                principal_name=UnknownConfigurationMock(),
+                                default_fs='hdfs://c6401.ambari.apache.org:8020',
+                                )
+      self.assertResourceCalled('HdfsResource', '/amshbase/staging',
+                                security_enabled = False,
+                                hadoop_bin_dir = '/usr/bin',
+                                keytab = UnknownConfigurationMock(),
+                                kinit_path_local = '/usr/bin/kinit',
+                                user = 'hdfs',
+                                owner = 'ams',
+                                mode = 0711,
+                                hadoop_conf_dir = '/etc/hadoop/conf',
+                                type = 'directory',
+                                action = ['create_on_execute'],
+                                hdfs_site=self.getConfig()['configurations']['hdfs-site'],
+                                principal_name=UnknownConfigurationMock(),
+                                default_fs='hdfs://c6401.ambari.apache.org:8020',
+                                )
+      self.assertResourceCalled('HdfsResource', None,
+                                security_enabled = False,
+                                hadoop_bin_dir = '/usr/bin',
+                                keytab = UnknownConfigurationMock(),
+                                kinit_path_local = '/usr/bin/kinit',
+                                user = 'hdfs',
+                                hadoop_conf_dir = '/etc/hadoop/conf',
+                                action = ['execute'],
+                                hdfs_site=self.getConfig()['configurations']['hdfs-site'],
+                                principal_name=UnknownConfigurationMock(),
+                                default_fs='hdfs://c6401.ambari.apache.org:8020',
+                                )
+    self.assertResourceCalled('Directory', '/var/run/ambari-metrics-collector/',
+                              owner = 'ams',
+                              recursive = True
+    )
+    self.assertResourceCalled('Directory', '/var/log/ambari-metrics-collector',
+                              owner = 'ams',
+                              recursive = True
+    )
+    self.assertResourceCalled('File', '/etc/ams-hbase/conf/log4j.properties',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              mode = 0644,
+                              content = "\n"
+    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b7d74258/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
index 5ce4a0c..9a53af4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
@@ -61,7 +61,7 @@ class TestFlumeHandler(RMFTestCase):
     self.assertTrue(set_desired_mock.call_args[0][0] == 'STARTED')
 
 
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su flume -l -s /bin/bash -c 'export PATH=/bin JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/bin/flume-ng agent --name a1 --conf /etc/flume/conf/a1 --conf-file /etc/flume/conf/a1/flume.conf -Dflume.monitoring.type=ganglia -Dflume.monitoring.hosts=c6401.ambari.apache.org:8655 > /var/log/flume/a1.out 2>&1' &",
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su flume -l -s /bin/bash -c 'export PATH=/bin JAVA_HOME=/usr/jdk64/jdk1.7.0_45 ; /usr/bin/flume-ng agent --name a1 --conf /etc/flume/conf/a1 --conf-file /etc/flume/conf/a1/flume.conf -Dflume.monitoring.type=org.apache.hadoop.metrics2.sink.flume.FlumeTimelineMetricsSink -Dflume.monitoring.node=c6401.ambari.apache.org:6188 > /var/log/flume/a1.out 2>&1' &",
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         wait_for_finish = False,
     )
@@ -217,6 +217,10 @@ class TestFlumeHandler(RMFTestCase):
                               owner="flume",
                               content=InlineTemplate(self.getConfig()['configurations']['flume-env']['content'])
     )
+    self.assertResourceCalled('File', "/etc/flume/conf/a1/flume-metrics2.properties",
+                              owner="flume",
+                              content=Template("flume-metrics2.properties.j2")
+    )
 
   def assert_configure_many(self):
     self.assertResourceCalled('Directory', '/var/run/flume')

http://git-wip-us.apache.org/repos/asf/ambari/blob/b7d74258/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
index cbab92e..b40c44a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
@@ -732,10 +732,129 @@
             "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",

             "REPOSITORY_CONFIG_PASSWORD": "hadoop", 
             "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+        },
+        "ams-hbase-env": {
+            "hbase_pid_dir": "/var/run/ambari-metrics-collector/",
+            "regionserver_xmn_size": "256m",
+            "max_open_files_limit": "32768",
+            "hbase_master_maxperm_size": "128m",
+            "hbase_regionserver_xmn_ratio": "0.2",
+            "hbase_master_heapsize": "512m",
+            "hbase_regionserver_heapsize": "512m",
+            "hbase_log_dir": "/var/log/ambari-metrics-collector",
+            "hbase_master_xmn_size": "256m",
+            "content": "\n"
+        },
+        "ams-log4j": {
+            "content": "\n"
+        },
+        "ams-hbase-site": {
+            "hbase.master.info.bindAddress": "0.0.0.0",
+            "hbase.zookeeper.property.dataDir": "${hbase.tmp.dir}/zookeeper",
+            "hbase.master.wait.on.regionservers.mintostart": "1",
+            "hbase.replication": "false",
+            "hbase.regionserver.global.memstore.lowerLimit": "0.3",
+            "hbase.hregion.memstore.block.multiplier": "4",
+            "hbase.hregion.memstore.flush.size": "134217728",
+            "hbase.rootdir": "hdfs://localhost:8020/apps/hbase/data",
+            "hbase.zookeeper.property.clientPort": "61181",
+            "phoenix.spool.directory": "${hbase.tmp.dir}/phoenix-spool",
+            "hbase.client.scanner.timeout.period": "900000",
+            "phoenix.groupby.maxCacheSize": "307200000",
+            "hbase.snapshot.enabled": "false",
+            "hbase.regionserver.global.memstore.upperLimit": "0.35",
+            "hbase_master_xmn_size": "128m",
+            "phoenix.query.spoolThresholdBytes": "12582912",
+            "zookeeper.session.timeout": "120000",
+            "hbase.tmp.dir": "hdfs://localhost:8020/apps/hbase/data/tmp",
+            "hfile.block.cache.size": "0.3",
+            "hbase.regionserver.port": "61320",
+            "hbase.regionserver.thread.compaction.small": "3",
+            "hbase.master.info.port": "61310",
+            "hbase.hregion.majorcompaction": "0",
+            "phoenix.query.maxGlobalMemoryPercentage": "15",
+            "hbase.zookeeper.quorum": "{{zookeeper_quorum_hosts}}",
+            "hbase.regionserver.info.port": "61330",
+            "hbase.hstore.blockingStoreFiles": "200",
+            "hbase.master.port": "61300",
+            "hbase.zookeeper.leaderport": "61388",
+            "hbase.regionserver.thread.compaction.large": "2",
+            "phoenix.query.timeoutMs": "1200000",
+            "hbase.local.dir": "${hbase.tmp.dir}/local",
+            "hbase.cluster.distributed": "false",
+            "zookeeper.session.timeout.localHBaseCluster": "20000",
+            "hbase.client.scanner.caching": "10000",
+            "phoenix.sequence.saltBuckets": "2",
+            "hbase.hstore.flusher.count": "2",
+            "hbase.zookeeper.peerport": "61288"
+        },
+        "ams-env": {
+            "ambari_metrics_user": "ams",
+            "metrics_monitor_log_dir": "/var/log/ambari-metrics-monitor",
+            "metrics_collector_log_dir": "/var/log/ambari-metrics-collector",
+            "metrics_monitor_pid_dir": "/var/run/ambari-metrics-monitor",
+            "content": "\n",
+            "metrics_collector_pid_dir": "/var/run/ambari-metrics-collector",
+            "metrics_collector_heapsize": "512m"
+        },
+        "ams-hbase-policy": {
+            "security.masterregion.protocol.acl": "*",
+            "security.admin.protocol.acl": "*",
+            "security.client.protocol.acl": "*"
+        },
+        "ams-hbase-log4j": {
+            "content": "\n"
+        },
+        "ams-site": {
+            "timeline.metrics.host.aggregator.minute.ttl": "604800",
+            "timeline.metrics.cluster.aggregator.daily.checkpointCutOffMultiplier": "1",
+            "timeline.metrics.cluster.aggregator.daily.ttl": "63072000",
+            "timeline.metrics.cluster.aggregator.minute.timeslice.interval": "30",
+            "timeline.metrics.service.resultset.fetchSize": "2000",
+            "timeline.metrics.service.checkpointDelay": "60",
+            "timeline.metrics.host.aggregator.hourly.disabled": "false",
+            "timeline.metrics.cluster.aggregator.daily.interval": "86400",
+            "timeline.metrics.cluster.aggregator.hourly.ttl": "31536000",
+            "timeline.metrics.host.aggregator.daily.disabled": "false",
+            "timeline.metrics.hbase.compression.scheme": "SNAPPY",
+            "timeline.metrics.cluster.aggregator.hourly.interval": "3600",
+            "phoenix.spool.directory": "/tmp",
+            "timeline.metrics.host.aggregator.ttl": "86400",
+            "timeline.metrics.cluster.aggregator.hourly.checkpointCutOffMultiplier": "2",
+            "timeline.metrics.service.cluster.aggregator.appIds": "datanode,nodemanager,hbase",
+            "timeline.metrics.service.webapp.address": "0.0.0.0:6188",
+            "timeline.metrics.cluster.aggregator.hourly.disabled": "false",
+            "timeline.metrics.aggregator.checkpoint.dir": "/var/lib/ambari-metrics-collector/checkpoint",
+            "timeline.metrics.hbase.data.block.encoding": "FAST_DIFF",
+            "timeline.metrics.cluster.aggregator.minute.ttl": "2592000",
+            "timeline.metrics.host.aggregator.minute.disabled": "false",
+            "phoenix.query.maxGlobalMemoryPercentage": "25",
+            "timeline.metrics.service.operation.mode": "distributed",
+            "timeline.metrics.host.aggregator.minute.checkpointCutOffMultiplier": "2",
+            "timeline.metrics.cluster.aggregator.minute.checkpointCutOffMultiplier": "2",
+            "timeline.metrics.host.aggregator.hourly.checkpointCutOffMultiplier": "2",
+            "timeline.metrics.cluster.aggregator.daily.disabled": "false",
+            "timeline.metrics.service.rpc.address": "0.0.0.0:60200",
+            "timeline.metrics.cluster.aggregator.minute.disabled": "false",
+            "timeline.metrics.host.aggregator.hourly.ttl": "2592000",
+            "timeline.metrics.host.aggregator.minute.interval": "120",
+            "timeline.metrics.service.default.result.limit": "5760",
+            "timeline.metrics.host.aggregator.daily.ttl": "31536000",
+            "timeline.metrics.host.aggregator.daily.checkpointCutOffMultiplier": "1",
+            "timeline.metrics.daily.aggregator.minute.interval": "86400",
+            "timeline.metrics.cluster.aggregator.minute.interval": "120",
+            "timeline.metrics.host.aggregator.hourly.interval": "3600"
         }
     },
     "configuration_attributes": {
-      "yarn-site": {
+        "ams-hbase-env": {},
+        "ams-hbase-security-site": {},
+        "ams-log4j": {},
+        "ams-hbase-site": {},
+        "ams-hbase-policy": {},
+        "ams-hbase-log4j": {},
+        "ams-site": {},
+        "yarn-site": {
         "final": {
           "yarn.nodemanager.disk-health-checker.min-healthy-disks": "true",
           "yarn.nodemanager.container-executor.class": "true",
@@ -801,6 +920,30 @@
       }
     },
     "configurationTags": {
+        "ams-hbase-env": {
+            "tag": "version1"
+        },
+        "ams-hbase-security-site": {
+            "tag": "version1"
+        },
+        "ams-hbase-site": {
+            "tag": "version1"
+        },
+        "ams-env": {
+            "tag": "version1"
+        },
+        "ams-site": {
+            "tag": "version1"
+        },
+        "ams-hbase-policy": {
+            "tag": "version1"
+        },
+        "ams-log4j": {
+            "tag": "version1"
+        },
+        "ams-hbase-log4j": {
+            "tag": "version1"
+        },
         "capacity-scheduler": {
             "tag": "version1"
         }, 
@@ -951,6 +1094,9 @@
         ],
         "falcon_server_hosts": [
             "c6402.ambari.apache.org"
+        ],
+        "metrics_collector_hosts": [
+            "c6401.ambari.apache.org"
         ]
     }
 }

