ambari-commits mailing list archives

From: jlun...@apache.org
Subject: ambari git commit: AMBARI-9993: Add support for management of Phoenix Query Server to HDP Stack - Addendum patch (Nick Dimiduk via jluniya)
Date: Wed, 22 Apr 2015 00:34:53 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 9b2f26698 -> f24531d1b


AMBARI-9993: Add support for management of Phoenix Query Server to HDP Stack - Addendum patch
(Nick Dimiduk via jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f24531d1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f24531d1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f24531d1

Branch: refs/heads/trunk
Commit: f24531d1bbab130fc89b09720c536f3c345bd038
Parents: 9b2f266
Author: Jayush Luniya <jluniya@hortonworks.com>
Authored: Tue Apr 21 17:34:47 2015 -0700
Committer: Jayush Luniya <jluniya@hortonworks.com>
Committed: Tue Apr 21 17:34:47 2015 -0700

----------------------------------------------------------------------
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |   4 +-
 .../package/scripts/phoenix_queryserver.py      |   6 +-
 .../package/scripts/phoenix_service.py          |  32 ++-
 .../0.5.0.2.3/package/scripts/kms_service.py    |   3 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     | 266 ++++++++++++++++++-
 5 files changed, 292 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f24531d1/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
index eb62f92..c5f9e7e 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
@@ -33,7 +33,7 @@ def hbase(name=None):
             configuration_attributes=params.config['configuration_attributes']['hbase-site']
   )
 
-# name is 'master' or 'regionserver' or 'client'
+# name is 'master' or 'regionserver' or 'queryserver' or 'client'
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hbase(name=None):
   import params
@@ -155,7 +155,7 @@ def hbase(name=None):
       group=params.user_group,
       owner=params.hbase_user
     )
-  if name in ["master","regionserver"]:
+  if name in ["master","regionserver", "queryserver"]:
     params.HdfsDirectory(params.hbase_hdfs_root_dir,
                          action="create_delayed",
                          owner=params.hbase_user
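
For readers skimming the hunk above: hbase() is dispatched on the component name, and this addendum adds 'queryserver' to the set of names that get the HDFS root-directory setup previously reserved for masters and region servers. A minimal sketch of that dispatch, condensed from the hunk (surrounding resource calls omitted; everything shown comes from the diff, not the full script):

  # Condensed sketch of the dispatch inside hbase.py; not the full function.
  def hbase(name=None):
    import params  # Ambari scripts resolve cluster configuration via a params module

    # name is 'master' or 'regionserver' or 'queryserver' or 'client'
    if name in ["master", "regionserver", "queryserver"]:
      # server-side roles, now including the Phoenix Query Server,
      # get the HBase root directory created in HDFS
      params.HdfsDirectory(params.hbase_hdfs_root_dir,
                           action="create_delayed",
                           owner=params.hbase_user)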

http://git-wip-us.apache.org/repos/asf/ambari/blob/f24531d1/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
index db2687f..c12231a 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
@@ -20,7 +20,9 @@ limitations under the License.
 import sys
 from resource_management.libraries.script import Script
 from phoenix_service import phoenix_service
+from hbase import hbase
 
+# Note: Phoenix Query Server is only applicable to HDP-2.3 and above.
 class PhoenixQueryServer(Script):
 
   def install(self, env):
@@ -30,7 +32,9 @@ class PhoenixQueryServer(Script):
     return {"HDP": "phoenix-server"}
 
   def configure(self, env):
-    pass
+    import params
+    env.set_params(params)
+    hbase(name='queryserver')
 
   def start(self, env, rolling_restart=False):
     import params
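
Taken together with the test changes below, the effect is that configure() now lays down the HBase client configuration by delegating to hbase() with the new 'queryserver' name, and start() re-applies that configuration before launching the daemon. A sketch of the resulting class, assembled from this hunk plus the assertions in the test file (the stop() body and the execute() entry point follow the usual Ambari Script convention and are assumptions, not shown in this diff):

  from resource_management.libraries.script import Script
  from phoenix_service import phoenix_service
  from hbase import hbase

  class PhoenixQueryServer(Script):

    def configure(self, env):
      import params
      env.set_params(params)
      hbase(name='queryserver')   # write hbase-site.xml, directories, env files

    def start(self, env, rolling_restart=False):
      import params
      env.set_params(params)
      self.configure(env)         # inferred from the test assertions: start re-runs configure
      phoenix_service('start')

    def stop(self, env, rolling_restart=False):
      import params
      env.set_params(params)
      phoenix_service('stop')

  if __name__ == "__main__":
    PhoenixQueryServer().execute()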

http://git-wip-us.apache.org/repos/asf/ambari/blob/f24531d1/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_service.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_service.py
index 14132fb..8aa1994 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_service.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_service.py
@@ -21,19 +21,27 @@ limitations under the License.
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions import check_process_status, format
 
+# Note: Phoenix Query Server is only applicable to HDP-2.3 and above.
 def phoenix_service(action = 'start'): # 'start', 'stop', 'status'
+    # Note: params/status_params should already be imported before calling phoenix_service()
     pid_file = format("{pid_dir}/phoenix-{hbase_user}-server.pid")
     no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
+
+    if action == "status":
+      check_process_status(pid_file)
+    else:
+      env = {'JAVA_HOME': format("{java64_home}"), 'HBASE_CONF_DIR': format("{hbase_conf_dir}")}
+      daemon_cmd = format("{phx_daemon_script} {action}")
+      if action == 'start':
+        Execute(daemon_cmd,
+                user=format("{hbase_user}"),
+                environment=env)
   
-    if action == 'start':
-      Execute(format("{phx_daemon_script} start"))
-  
-    elif action == 'stop':
-      daemon_cmd = format("{phx_daemon_script} stop")
-      Execute(daemon_cmd,
-        timeout = 30,
-        on_timeout = format("! ( {no_op_test} ) || {sudo} -H -E kill -9 `cat {pid_file}`"),
-      )
-      Execute(format("rm -f {pid_file}"))
-    elif action == 'status':
-      check_process_status(pid_file)
\ No newline at end of file
+      elif action == 'stop':
+        Execute(daemon_cmd,
+                timeout = 30,
+                on_timeout = format("! ( {no_op_test} ) || {sudo} -H -E kill -9 `cat {pid_file}`"),
+                user=format("{hbase_user}"),
+                environment=env
+        )
+        Execute(format("rm -f {pid_file}"))
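
Because interleaved +/- lines are hard to read, here is the resulting phoenix_service() consolidated into one piece (content taken from the hunk above; only the comments are added). The notable changes: the status action short-circuits to a pid-file check, and both start and stop now run the daemon script as the HBase user with an explicit JAVA_HOME/HBASE_CONF_DIR environment:

  from resource_management.core.resources.system import Execute
  from resource_management.libraries.functions import check_process_status, format

  def phoenix_service(action = 'start'): # 'start', 'stop', 'status'
      # params/status_params must already be imported by the calling script
      pid_file = format("{pid_dir}/phoenix-{hbase_user}-server.pid")
      no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")

      if action == "status":
        check_process_status(pid_file)   # raises if the pid file is stale or missing
      else:
        env = {'JAVA_HOME': format("{java64_home}"), 'HBASE_CONF_DIR': format("{hbase_conf_dir}")}
        daemon_cmd = format("{phx_daemon_script} {action}")
        if action == 'start':
          Execute(daemon_cmd, user=format("{hbase_user}"), environment=env)
        elif action == 'stop':
          Execute(daemon_cmd,
                  timeout = 30,
                  on_timeout = format("! ( {no_op_test} ) || {sudo} -H -E kill -9 `cat {pid_file}`"),
                  user=format("{hbase_user}"),
                  environment=env)
          Execute(format("rm -f {pid_file}"))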

http://git-wip-us.apache.org/repos/asf/ambari/blob/f24531d1/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_service.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_service.py
index f022bd5..caf4f13 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_service.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_service.py
@@ -25,8 +25,7 @@ from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
 
 def kms_service(action='start'):
-  import params
-
+  # Note: params/status_params should already be imported before calling kms_service()
   if action == 'start':
     no_op_test = format('ps -ef | grep proc_rangerkms | grep -v grep')
     cmd = format('{kms_home}/ranger-kms-services.sh start')
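
The kms_service.py tweak mirrors the phoenix_service.py one: the module-level helper no longer imports params itself and relies on the calling Script subclass to have imported params (or status_params) and set them on the environment. A hypothetical caller, sketched only to illustrate that contract (the class name and the set of actions wired up here are assumptions, not part of this patch):

  # Hypothetical caller sketch: the Script subclass imports params before
  # delegating to the module-level kms_service() helper.
  from resource_management.libraries.script import Script
  from kms_service import kms_service

  class KmsServer(Script):

    def start(self, env, rolling_restart=False):
      import params
      env.set_params(params)      # kms_service() formats its paths from these params
      kms_service(action = 'start')

    def stop(self, env, rolling_restart=False):
      import params
      env.set_params(params)
      kms_service(action = 'stop')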

http://git-wip-us.apache.org/repos/asf/ambari/blob/f24531d1/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index d3edd9f..1568588 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -35,7 +35,8 @@ class TestPhoenixQueryServer(RMFTestCase):
                    hdp_stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    
+
+    self.assert_configure_default()
     self.assertNoMoreResources()
     
   def test_start_default(self):
@@ -46,6 +47,12 @@ class TestPhoenixQueryServer(RMFTestCase):
                    hdp_stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES
     )
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py start',
+                            environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': '/etc/hbase/conf'},
+                            user = 'hbase'
+    )
+    self.assertNoMoreResources()
 
   def test_stop_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -59,6 +66,8 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
         on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/phoenix-hbase-server.pid`',
         timeout = 30,
+        environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': '/etc/hbase/conf'},
+        user = 'hbase'
     )
     
     self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/phoenix-hbase-server.pid',
@@ -73,7 +82,8 @@ class TestPhoenixQueryServer(RMFTestCase):
                    hdp_stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    
+
+    self.assert_configure_secured()
     self.assertNoMoreResources()
     
   def test_start_secured(self):
@@ -84,6 +94,12 @@ class TestPhoenixQueryServer(RMFTestCase):
                    hdp_stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES
     )
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py start',
+                          environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': '/etc/hbase/conf'},
+                          user = 'hbase'
+    )
+    self.assertNoMoreResources()
 
   def test_stop_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -97,6 +113,8 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
         on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/phoenix-hbase-server.pid`',
         timeout = 30,
+        environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': '/etc/hbase/conf'},
+        user = 'hbase'
     )
     
     self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/phoenix-hbase-server.pid',
@@ -158,3 +176,247 @@ class TestPhoenixQueryServer(RMFTestCase):
       user = 'hbase')
 
     self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/etc/hbase',
+                              mode = 0755
+    )
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+                              owner = 'hbase',
+                              mode=0775,
+                              recursive = True,
+                              cd_access='a'
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              mode=0775,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              mode=0775,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['hbase-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['hbase-policy'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy']
+                              )
+    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+                              owner = 'hbase',
+                              content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+                              owner = 'hbase',
+                              template_tag = 'GANGLIA-RS',
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+                              owner = 'hbase',
+                              template_tag = None,
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hbase',
+                              owner = 'hbase',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hbase',
+                              owner = 'hbase',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('File',
+                              '/etc/hbase/conf/log4j.properties',
+                              mode=0644,
+                              group='hadoop',
+                              owner='hbase',
+                              content='log4jproperties\nline2'
+    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6405.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'],
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/etc/hbase',
+                              mode = 0755
+    )
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+                              owner = 'hbase',
+                              mode=0775,
+                              recursive = True,
+                              cd_access='a'
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              mode=0775,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              mode=0775,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['hbase-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['hbase-policy'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy']
+    )
+    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+                              owner = 'hbase',
+                              content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+                              owner = 'hbase',
+                              template_tag = 'GANGLIA-RS',
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+                              owner = 'hbase',
+                              template_tag = None,
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_queryserver_jaas.conf',
+                              owner = 'hbase',
+                              template_tag = None,
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hbase',
+                              owner = 'hbase',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hbase',
+                              owner = 'hbase',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('File',
+                              '/etc/hbase/conf/log4j.properties',
+                              mode=0644,
+                              group='hadoop',
+                              owner='hbase',
+                              content='log4jproperties\nline2'
+    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6405.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'],
+                              )
\ No newline at end of file
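
The new tests follow the standard RMFTestCase flow used throughout this file: executeScript() replays a command against the script under test, each assertResourceCalled() checks the next recorded resource in order, and assertNoMoreResources() confirms nothing else ran. As a readability aid, here is one of the added start tests condensed onto unwrapped lines (executeScript arguments not visible in the hunk, such as the exact config_file name, are assumptions):

  def test_start_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
                       classname = "PhoenixQueryServer",
                       command = "start",
                       config_file = "default.json",   # assumption: suite default config
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)
    # configure() resources first, then the daemon launch as the hbase user
    self.assert_configure_default()
    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py start',
                              environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': '/etc/hbase/conf'},
                              user = 'hbase')
    self.assertNoMoreResources()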

