Return-Path: X-Original-To: apmail-ambari-commits-archive@www.apache.org Delivered-To: apmail-ambari-commits-archive@www.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id 07E1D18A2C for ; Thu, 23 Apr 2015 10:55:50 +0000 (UTC) Received: (qmail 68275 invoked by uid 500); 23 Apr 2015 10:55:49 -0000 Delivered-To: apmail-ambari-commits-archive@ambari.apache.org Received: (qmail 68242 invoked by uid 500); 23 Apr 2015 10:55:49 -0000 Mailing-List: contact commits-help@ambari.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: ambari-dev@ambari.apache.org Delivered-To: mailing list commits@ambari.apache.org Received: (qmail 68233 invoked by uid 99); 23 Apr 2015 10:55:49 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Thu, 23 Apr 2015 10:55:49 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id A6E04E0414; Thu, 23 Apr 2015 10:55:49 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: aonishuk@apache.org To: commits@ambari.apache.org Message-Id: X-Mailer: ASF-Git Admin Mailer Subject: ambari git commit: AMBARI-10690. Hbase Ranger plugin fails to install with non-root agent (aonishuk) Date: Thu, 23 Apr 2015 10:55:49 +0000 (UTC) Repository: ambari Updated Branches: refs/heads/trunk dcfe0d462 -> 8d8b5280d AMBARI-10690. 
Hbase Ranger plugin fails to install with non-root agent (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8d8b5280 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8d8b5280 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8d8b5280 Branch: refs/heads/trunk Commit: 8d8b5280df608f8e218b7195880364097fa1d189 Parents: dcfe0d4 Author: Andrew Onishuk Authored: Thu Apr 23 13:55:43 2015 +0300 Committer: Andrew Onishuk Committed: Thu Apr 23 13:55:43 2015 +0300 ---------------------------------------------------------------------- .../libraries/functions/ranger_functions.py | 27 ++- .../0.96.0.2.0/package/scripts/params_linux.py | 116 +++++------ .../package/scripts/setup_ranger_hbase.py | 191 +++---------------- .../ranger-hbase-plugin-properties.xml | 50 ++++- .../stacks/2.0.6/configs/client-upgrade.json | 37 ++++ .../2.0.6/configs/default.hbasedecom.json | 37 ++++ .../python/stacks/2.0.6/configs/default.json | 39 +++- .../python/stacks/2.0.6/configs/hbase-2.2.json | 37 ++++ .../stacks/2.0.6/configs/hbase-check-2.2.json | 40 +++- .../stacks/2.0.6/configs/hbase-preupgrade.json | 63 +++++- .../stacks/2.0.6/configs/hbase-rs-2.2.json | 37 ++++ .../python/stacks/2.0.6/configs/secured.json | 37 ++++ 12 files changed, 480 insertions(+), 231 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py b/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py index 99b8c2c..e47f1dc 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py +++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py @@ -23,6 +23,7 @@ from StringIO import StringIO as BytesIO import json from resource_management.core.logger import Logger import urllib2, base64, httplib +from resource_management import * class Rangeradmin: @@ -73,7 +74,31 @@ class Rangeradmin: except httplib.BadStatusLine: Logger.error("Ranger Admin service is not reachable, please restart the service and then try again") return None - + + + + def create_ranger_repository(self, component, repo_name, repo_properties, + ambari_ranger_admin, ambari_ranger_password, + admin_uname, admin_password, policy_user): + response_code, response_recieved = self.check_ranger_login_urllib2(self.urlLogin, 'test:test') + repo_data = json.dumps(repo_properties) + + if response_code is not None and response_code == 200: + ambari_ranger_admin, ambari_ranger_password = self.create_ambari_admin_user(ambari_ranger_admin, ambari_ranger_password, format("{admin_uname}:{admin_password}")) + ambari_username_password_for_ranger = ambari_ranger_admin + ':' + ambari_ranger_password + if ambari_ranger_admin != '' and ambari_ranger_password != '': + repo = self.get_repository_by_name_urllib2(repo_name, component, 'true', ambari_username_password_for_ranger) + if repo and repo['name'] == repo_name: + Logger.info('{0} Repository exist'.format(component.title())) + else: + response = self.create_repository_urllib2(repo_data, ambari_username_password_for_ranger, policy_user) + if response is not None: + Logger.info('{0} Repository created in Ranger admin'.format(component.title())) + else: + raise Fail('{0} Repository creation failed in Ranger admin'.format(component.title())) + else: + raise Fail('Ambari admin username and password are blank ') + def create_repository_urllib2(self, data, usernamepassword, policy_user): try: searchRepoURL = self.urlReposPub 
http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py index eafcd89..eb903f4 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py @@ -24,6 +24,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver from resource_management.libraries.functions.default import default from resource_management import * import status_params +import json # server configurations config = Script.get_config() @@ -174,7 +175,7 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0: if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0: # Setting Flag value for ranger hbase plugin enable_ranger_hbase = False - ranger_plugin_enable = default("/configurations/ranger-hbase-plugin-properties/ranger-hbase-plugin-enabled","no") + ranger_plugin_enable = config['configurations']['ranger-hbase-plugin-properties']['ranger-hbase-plugin-enabled'] if ranger_plugin_enable.lower() == 'yes': enable_ranger_hbase = True elif ranger_plugin_enable.lower() == 'no': @@ -186,38 +187,20 @@ has_ranger_admin = not len(ranger_admin_hosts) == 0 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0] - # ranger hbase properties -policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080") -sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar") -xa_audit_db_flavor 
= default("/configurations/admin-properties/DB_FLAVOR", "MYSQL") -xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit") -xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger") -xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger") -xa_db_host = default("/configurations/admin-properties/db_host", "localhost") +policymgr_mgr_url = config['configurations']['admin-properties']['policymgr_external_url'] +sql_connector_jar = config['configurations']['admin-properties']['SQL_CONNECTOR_JAR'] +xa_audit_db_flavor = config['configurations']['admin-properties']['DB_FLAVOR'] +xa_audit_db_name = config['configurations']['admin-properties']['audit_db_name'] +xa_audit_db_user = config['configurations']['admin-properties']['audit_db_user'] +xa_audit_db_password = config['configurations']['admin-properties']['audit_db_password'] +xa_db_host = config['configurations']['admin-properties']['db_host'] repo_name = str(config['clusterName']) + '_hbase' -db_enabled = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false") -hdfs_enabled = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false") -hdfs_dest_dir = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd") -hdfs_buffer_dir = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit") -hdfs_archive_dir = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive") -hdfs_dest_file = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log") -hdfs_dest_flush_int_sec = 
default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900") -hdfs_dest_rollover_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400") -hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60") -hdfs_buffer_file = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log") -hdfs_buffer_flush_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60") -hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600") -hdfs_archive_max_file_count = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10") -ssl_keystore_file = default("/configurations/ranger-hbase-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks") -ssl_keystore_password = default("/configurations/ranger-hbase-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword") -ssl_truststore_file = default("/configurations/ranger-hbase-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks") -ssl_truststore_password = default("/configurations/ranger-hbase-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit") -grant_revoke = default("/configurations/ranger-hbase-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE","true") -common_name_for_certificate = default("/configurations/ranger-hbase-plugin-properties/common.name.for.certificate", "-") + +common_name_for_certificate = config['configurations']['ranger-hbase-plugin-properties']['common.name.for.certificate'] zookeeper_znode_parent = config['configurations']['hbase-site']['zookeeper.znode.parent'] 
-hbase_zookeeoer_quorum = config['configurations']['hbase-site']['hbase.zookeeper.quorum'] +hbase_zookeeper_quorum = config['configurations']['hbase-site']['hbase.zookeeper.quorum'] hbase_zookeeper_property_clientPort = config['configurations']['hbase-site']['hbase.zookeeper.property.clientPort'] hbase_security_authentication = config['configurations']['hbase-site']['hbase.security.authentication'] hadoop_security_authentication = config['configurations']['core-site']['hadoop.security.authentication'] @@ -225,38 +208,55 @@ hadoop_security_authentication = config['configurations']['core-site']['hadoop.s repo_config_username = default("/configurations/ranger-hbase-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hbase") repo_config_password = default("/configurations/ranger-hbase-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hbase") -admin_uname = default("/configurations/ranger-env/admin_username", "admin") -admin_password = default("/configurations/ranger-env/admin_password", "admin") -admin_uname_password = format("{admin_uname}:{admin_password}") +admin_uname = config['configurations']['ranger-env']['admin_username'] +admin_password = config['configurations']['ranger-env']['admin_password'] -ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin") -ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123") -policy_user = default("/configurations/ranger-hbase-plugin-properties/policy_user", "ambari-qa") +ambari_ranger_admin = config['configurations']['ranger-env']['ranger_admin_username'] +ambari_ranger_password = config['configurations']['ranger-env']['ranger_admin_password'] +policy_user = config['configurations']['ranger-hbase-plugin-properties']['policy_user'] #For curl command in ranger plugin to get db connector jdk_location = config['hostLevelParams']['jdk_location'] java_share_dir = '/usr/share/java' -if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql': - 
jdbc_symlink_name = "mysql-jdbc-driver.jar" - jdbc_jar_name = "mysql-connector-java.jar" -elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle': - jdbc_jar_name = "ojdbc6.jar" - jdbc_symlink_name = "oracle-jdbc-driver.jar" -elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'postgres': - jdbc_jar_name = "postgresql.jar" - jdbc_symlink_name = "postgres-jdbc-driver.jar" -elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'sqlserver': - jdbc_jar_name = "sqljdbc4.jar" - jdbc_symlink_name = "mssql-jdbc-driver.jar" - -downloaded_custom_connector = format("{exec_tmp_dir}/{jdbc_jar_name}") - -driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}") -driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}") - -#Solr properties added for HDP2.3 - Ranger -solr_enabled = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.SOLR.IS_ENABLED", "false") -solr_max_queue_size = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.SOLR.MAX_QUEUE_SIZE", "1") -solr_max_flush_interval = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS", "1000") -solr_url = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.SOLR.SOLR_URL", "http://localhost:6083/solr/ranger_audits") +if has_ranger_admin: + if xa_audit_db_flavor.lower() == 'mysql': + jdbc_symlink_name = "mysql-jdbc-driver.jar" + jdbc_jar_name = "mysql-connector-java.jar" + elif xa_audit_db_flavor.lower() == 'oracle': + jdbc_jar_name = "ojdbc6.jar" + jdbc_symlink_name = "oracle-jdbc-driver.jar" + elif xa_audit_db_flavor.lower() == 'postgres': + jdbc_jar_name = "postgresql.jar" + jdbc_symlink_name = "postgres-jdbc-driver.jar" + elif xa_audit_db_flavor.lower() == 'sqlserver': + jdbc_jar_name = "sqljdbc4.jar" + jdbc_symlink_name = "mssql-jdbc-driver.jar" + + downloaded_custom_connector = format("{exec_tmp_dir}/{jdbc_jar_name}") + + driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}") + driver_curl_target = 
format("{java_share_dir}/{jdbc_jar_name}") + +hbase_ranger_plugin_config = { + 'username': repo_config_username, + 'password': repo_config_password, + 'hadoop.security.authentication': hadoop_security_authentication, + 'hbase.security.authentication': hbase_security_authentication, + 'hbase.zookeeper.property.clientPort': hbase_zookeeper_property_clientPort, + 'hbase.zookeeper.quorum': hbase_zookeeper_quorum, + 'zookeeper.znode.parent': zookeeper_znode_parent, + 'commonNameForCertificate': common_name_for_certificate, + 'hbase.master.kerberos.principal': master_jaas_princ if security_enabled else '' +} + +hbase_ranger_plugin_repo = { + 'isActive': 'true', + 'config': json.dumps(hbase_ranger_plugin_config), + 'description': 'hbase repo', + 'name': repo_name, + 'repositoryType': 'hbase', + 'assetType': '2' +} + + http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py index 5f7a830..b9c6241 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py @@ -36,173 +36,38 @@ def setup_ranger_hbase(): content = DownloadSource(params.driver_curl_source) ) - if not os.path.isfile(params.driver_curl_target): - Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.driver_curl_target), - path=["/bin", "/usr/bin/"], - sudo=True) + Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.driver_curl_target), + path=["/bin", "/usr/bin/"], + not_if=format("test -f 
{driver_curl_target}"), + sudo=True) - try: - command = 'hdp-select status hbase-client' - return_code, hdp_output = shell.call(command, timeout=20) - except Exception, e: - Logger.error(str(e)) - raise Fail('Unable to execute hdp-select command to retrieve the version.') - - if return_code != 0: - raise Fail('Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code))) - - hdp_version = re.sub('hbase-client - ', '', hdp_output).strip() - match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version) - - if match is None: - raise Fail('Failed to get extracted version') - - file_path = '/usr/hdp/'+ hdp_version +'/ranger-hbase-plugin/install.properties' + hdp_version = get_hdp_version('hbase-client') + file_path = format('/usr/hdp/{hdp_version}/ranger-hbase-plugin/install.properties') + if not os.path.isfile(file_path): - raise Fail('Ranger HBase plugin install.properties file does not exist at {0}'.format(file_path)) + raise Fail(format('Ranger HBase plugin install.properties file does not exist at {file_path}')) - ranger_hbase_dict = ranger_hbase_properties() - hbase_repo_data = hbase_repo_properties() - - write_properties_to_file(file_path, ranger_hbase_dict) + ModifyPropertiesFile(file_path, + properties = params.config['configurations']['ranger-hbase-plugin-properties'] + ) if params.enable_ranger_hbase: - cmd = format('cd /usr/hdp/{hdp_version}/ranger-hbase-plugin/ && sh enable-hbase-plugin.sh') - ranger_adm_obj = Rangeradmin(url=ranger_hbase_dict['POLICY_MGR_URL']) - response_code, response_recieved = ranger_adm_obj.check_ranger_login_urllib2(ranger_hbase_dict['POLICY_MGR_URL'] + '/login.jsp', 'test:test') - - if response_code is not None and response_code == 200: - ambari_ranger_admin, ambari_ranger_password = ranger_adm_obj.create_ambari_admin_user(params.ambari_ranger_admin, params.ambari_ranger_password, params.admin_uname_password) - ambari_username_password_for_ranger = ambari_ranger_admin + ':' + 
ambari_ranger_password - if ambari_ranger_admin != '' and ambari_ranger_password != '': - repo = ranger_adm_obj.get_repository_by_name_urllib2(ranger_hbase_dict['REPOSITORY_NAME'], 'hbase', 'true', ambari_username_password_for_ranger) - if repo and repo['name'] == ranger_hbase_dict['REPOSITORY_NAME']: - Logger.info('Hbase Repository exist') - else: - response = ranger_adm_obj.create_repository_urllib2(hbase_repo_data, ambari_username_password_for_ranger, params.policy_user) - if response is not None: - Logger.info('Hbase Repository created in Ranger admin') - else: - Logger.info('Hbase Repository creation failed in Ranger admin') - else: - Logger.info('Ambari admin username and password are blank ') - else: - Logger.info('Ranger service is not started on given host') + cmd = ('enable-hbase-plugin.sh',) + + ranger_adm_obj = Rangeradmin(url=params.policymgr_mgr_url) + ranger_adm_obj.create_ranger_repository('hbase', params.repo_name, params.hbase_ranger_plugin_repo, + params.ambari_ranger_admin, params.ambari_ranger_password, + params.admin_uname, params.admin_password, + params.policy_user) else: - cmd = format('cd /usr/hdp/{hdp_version}/ranger-hbase-plugin/ && sh disable-hbase-plugin.sh') - - Execute(cmd, environment={'JAVA_HOME': params.java64_home}, logoutput=True) - else: - Logger.info('Ranger admin not installed') - - -def write_properties_to_file(file_path, value): - for key in value: - modify_config(file_path, key, value[key]) - - -def modify_config(filepath, variable, setting): - var_found = False - already_set = False - V=str(variable) - S=str(setting) - # use quotes if setting has spaces # - if ' ' in S: - S = '%s' % S - for line in fileinput.input(filepath, inplace = 1): - # process lines that look like config settings # - if not line.lstrip(' ').startswith('#') and '=' in line: - _infile_var = str(line.split('=')[0].rstrip(' ')) - _infile_set = str(line.split('=')[1].lstrip(' ').rstrip()) - # only change the first matching occurrence # - if var_found == 
False and _infile_var.rstrip(' ') == V: - var_found = True - # don't change it if it is already set # - if _infile_set.lstrip(' ') == S: - already_set = True - else: - line = "%s=%s\n" % (V, S) - sys.stdout.write(line) - - # Append the variable if it wasn't found # - if not var_found: - with open(filepath, "a") as f: - f.write("%s=%s\n" % (V, S)) - elif already_set == True: - pass - else: - pass - - return - -def ranger_hbase_properties(): - import params - - ranger_hbase_properties = dict() - - ranger_hbase_properties['POLICY_MGR_URL'] = params.policymgr_mgr_url - ranger_hbase_properties['SQL_CONNECTOR_JAR'] = params.sql_connector_jar - ranger_hbase_properties['XAAUDIT.DB.FLAVOUR'] = params.xa_audit_db_flavor - ranger_hbase_properties['XAAUDIT.DB.DATABASE_NAME'] = params.xa_audit_db_name - ranger_hbase_properties['XAAUDIT.DB.USER_NAME'] = params.xa_audit_db_user - ranger_hbase_properties['XAAUDIT.DB.PASSWORD'] = params.xa_audit_db_password - ranger_hbase_properties['XAAUDIT.DB.HOSTNAME'] = params.xa_db_host - ranger_hbase_properties['REPOSITORY_NAME'] = params.repo_name - ranger_hbase_properties['XAAUDIT.DB.IS_ENABLED'] = params.db_enabled - - ranger_hbase_properties['XAAUDIT.HDFS.IS_ENABLED'] = params.hdfs_enabled - ranger_hbase_properties['XAAUDIT.HDFS.DESTINATION_DIRECTORY'] = params.hdfs_dest_dir - ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY'] = params.hdfs_buffer_dir - ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY'] = params.hdfs_archive_dir - ranger_hbase_properties['XAAUDIT.HDFS.DESTINTATION_FILE'] = params.hdfs_dest_file - ranger_hbase_properties['XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS'] = params.hdfs_dest_flush_int_sec - ranger_hbase_properties['XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_dest_rollover_int_sec - ranger_hbase_properties['XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS'] = params.hdfs_dest_open_retry_int_sec - ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FILE'] = 
params.hdfs_buffer_file - ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS'] = params.hdfs_buffer_flush_int_sec - ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_buffer_rollover_int_sec - ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT'] = params.hdfs_archive_max_file_count - - ranger_hbase_properties['SSL_KEYSTORE_FILE_PATH'] = params.ssl_keystore_file - ranger_hbase_properties['SSL_KEYSTORE_PASSWORD'] = params.ssl_keystore_password - ranger_hbase_properties['SSL_TRUSTSTORE_FILE_PATH'] = params.ssl_truststore_file - ranger_hbase_properties['SSL_TRUSTSTORE_PASSWORD'] = params.ssl_truststore_password - if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.3') >= 0: - ranger_hbase_properties['XAAUDIT.SOLR.IS_ENABLED'] = str(params.solr_enabled).lower() - ranger_hbase_properties['XAAUDIT.SOLR.MAX_QUEUE_SIZE'] = params.solr_max_queue_size - ranger_hbase_properties['XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS'] = params.solr_max_flush_interval - ranger_hbase_properties['XAAUDIT.SOLR.SOLR_URL'] = params.solr_url - - ranger_hbase_properties['UPDATE_XAPOLICIES_ON_GRANT_REVOKE'] = params.grant_revoke - - return ranger_hbase_properties - -def hbase_repo_properties(): - import params - - config_dict = dict() - config_dict['username'] = params.repo_config_username - config_dict['password'] = params.repo_config_password - config_dict['hadoop.security.authentication'] = params.hadoop_security_authentication - config_dict['hbase.security.authentication'] = params.hbase_security_authentication - config_dict['hbase.zookeeper.property.clientPort'] = params.hbase_zookeeper_property_clientPort - config_dict['hbase.zookeeper.quorum'] = params.hbase_zookeeoer_quorum - config_dict['zookeeper.znode.parent'] = params.zookeeper_znode_parent - config_dict['commonNameForCertificate'] = params.common_name_for_certificate - - if params.security_enabled: - 
config_dict['hbase.master.kerberos.principal'] = params.master_jaas_princ + cmd = ('disable-hbase-plugin.sh',) + + cmd_env = {'JAVA_HOME': params.java64_home, 'PWD': format('/usr/hdp/{hdp_version}/ranger-hbase-plugin'), 'PATH': format('/usr/hdp/{hdp_version}/ranger-hbase-plugin')} + + Execute(cmd, + environment=cmd_env, + logoutput=True, + sudo=True, + ) else: - config_dict['hbase.master.kerberos.principal'] = '' - - repo= dict() - repo['isActive'] = "true" - repo['config'] = json.dumps(config_dict) - repo['description'] = "hbase repo" - repo['name'] = params.repo_name - repo['repositoryType'] = "hbase" - repo['assetType'] = '2' - - data = json.dumps(repo) - - return data + Logger.info('Ranger admin not installed') \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/ranger-hbase-plugin-properties.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/ranger-hbase-plugin-properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/ranger-hbase-plugin-properties.xml index f09d2d1..f60d06f 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/ranger-hbase-plugin-properties.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/ranger-hbase-plugin-properties.xml @@ -158,5 +158,53 @@ true - + + + POLICY_MGR_URL + {{policymgr_mgr_url}} + Policy Manager url + + + + SQL_CONNECTOR_JAR + {{sql_connector_jar}} + Location of DB client library (please check the location of the jar file) + + + + XAAUDIT.DB.FLAVOUR + {{xa_audit_db_flavor}} + The database type to be used (mysql/oracle) + + + + XAAUDIT.DB.DATABASE_NAME + {{xa_audit_db_name}} + Audit database name + + + + XAAUDIT.DB.USER_NAME + {{xa_audit_db_user}} + Audit database user + + + + XAAUDIT.DB.PASSWORD 
+ {{xa_audit_db_password}} + Audit database password + + + + XAAUDIT.DB.HOSTNAME + {{xa_db_host}} + Audit database host name + + + + REPOSITORY_NAME + {{repo_name}} + Ranger repository name + + \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json b/ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json index 5aba635..956baad 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json +++ b/ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json @@ -562,6 +562,43 @@ "smokeuser": "ambari-qa", "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/" + }, + "ranger-hbase-plugin-properties": { + "POLICY_MGR_URL": "{{policymgr_mgr_url}}", + "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", + "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", + "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", + "common.name.for.certificate": "-", + "XAAUDIT.HDFS.IS_ENABLED": "false", + "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", + "ranger-hbase-plugin-enabled": "Yes", + "REPOSITORY_NAME": "{{repo_name}}", + "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", + "XAAUDIT.DB.IS_ENABLED": "true", + "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", + "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", + "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", + "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", + "XAAUDIT.SOLR.IS_ENABLED": "false", + "SSL_KEYSTORE_FILE_PATH": 
"/etc/hadoop/conf/ranger-plugin-keystore.jks", + "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", + "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", + "policy_user": "ambari-qa", + "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true", + "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", + "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", + "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", + "SSL_TRUSTSTORE_PASSWORD": "changeit", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", + "REPOSITORY_CONFIG_USERNAME": "hbase", + "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", + "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", + "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", + "REPOSITORY_CONFIG_PASSWORD": "hbase", + "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1" } }, "configurationTags": { http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/test/python/stacks/2.0.6/configs/default.hbasedecom.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.hbasedecom.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.hbasedecom.json index e40c3c4..e8cef10 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.hbasedecom.json +++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.hbasedecom.json @@ -541,6 +541,43 @@ }, "sqoop-env": { "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set 
the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"" + }, + "ranger-hbase-plugin-properties": { + "POLICY_MGR_URL": "{{policymgr_mgr_url}}", + "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", + "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", + "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", + "common.name.for.certificate": "-", + "XAAUDIT.HDFS.IS_ENABLED": "false", + "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", + "ranger-hbase-plugin-enabled": "Yes", + "REPOSITORY_NAME": "{{repo_name}}", + "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", + "XAAUDIT.DB.IS_ENABLED": "true", + "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", + "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", + "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", + "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", + "XAAUDIT.SOLR.IS_ENABLED": "false", + "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", + "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", + "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", + "policy_user": "ambari-qa", + "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true", + "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", + "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", + "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", + "SSL_TRUSTSTORE_PASSWORD": "changeit", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", + "REPOSITORY_CONFIG_USERNAME": "hbase", + "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", + 
"XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", + "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", + "REPOSITORY_CONFIG_PASSWORD": "hbase", + "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1" } }, "configuration_attributes": { http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/test/python/stacks/2.0.6/configs/default.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json index 8ed0af1..90cae08 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json +++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json @@ -608,6 +608,43 @@ }, "flume-log4j": { "content": "log4jproperties\nline2" + }, + "ranger-hbase-plugin-properties": { + "POLICY_MGR_URL": "{{policymgr_mgr_url}}", + "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", + "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", + "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", + "common.name.for.certificate": "-", + "XAAUDIT.HDFS.IS_ENABLED": "false", + "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", + "ranger-hbase-plugin-enabled": "Yes", + "REPOSITORY_NAME": "{{repo_name}}", + "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", + "XAAUDIT.DB.IS_ENABLED": "true", + "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", + "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", + "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", + "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", + "XAAUDIT.SOLR.IS_ENABLED": "false", + "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", + "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": 
"60", + "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", + "policy_user": "ambari-qa", + "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true", + "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", + "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", + "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", + "SSL_TRUSTSTORE_PASSWORD": "changeit", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", + "REPOSITORY_CONFIG_USERNAME": "hbase", + "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", + "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", + "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", + "REPOSITORY_CONFIG_PASSWORD": "hbase", + "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1" } }, "configuration_attributes": { @@ -736,7 +773,7 @@ }, "pig-properties": { "tag": "version1" - } + } }, "commandId": "7-1", "clusterHostInfo": { http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json b/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json index 4241330..fa48c6d 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json +++ b/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json @@ -583,6 +583,43 @@ "oozie_tar_source": "/usr/hdp/current/oozie-client/oozie-sharelib.tar.gz", "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/" + }, +"ranger-hbase-plugin-properties": { + "POLICY_MGR_URL": "{{policymgr_mgr_url}}", + "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", + "XAAUDIT.HDFS.DESTINATION_DIRECTORY": 
"hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", + "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", + "common.name.for.certificate": "-", + "XAAUDIT.HDFS.IS_ENABLED": "false", + "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", + "ranger-hbase-plugin-enabled": "Yes", + "REPOSITORY_NAME": "{{repo_name}}", + "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", + "XAAUDIT.DB.IS_ENABLED": "true", + "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", + "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", + "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", + "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", + "XAAUDIT.SOLR.IS_ENABLED": "false", + "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", + "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", + "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", + "policy_user": "ambari-qa", + "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true", + "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", + "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", + "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", + "SSL_TRUSTSTORE_PASSWORD": "changeit", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", + "REPOSITORY_CONFIG_USERNAME": "hbase", + "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", + "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", + "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", + "REPOSITORY_CONFIG_PASSWORD": "hbase", + "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1" } }, "configurationTags": { http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json 
---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json b/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json index 318b5f4..88017b5 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json +++ b/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json @@ -529,9 +529,43 @@ "hbase_regionserver_heapsize": "1024m", "hbase_log_dir": "/var/log/hbase" }, - "ranger-hbase-plugin-properties": { - "ranger-hbase-plugin-enabled":"yes" - }, + "ranger-hbase-plugin-properties": { + "POLICY_MGR_URL": "{{policymgr_mgr_url}}", + "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", + "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", + "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", + "common.name.for.certificate": "-", + "XAAUDIT.HDFS.IS_ENABLED": "false", + "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", + "ranger-hbase-plugin-enabled": "Yes", + "REPOSITORY_NAME": "{{repo_name}}", + "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", + "XAAUDIT.DB.IS_ENABLED": "true", + "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", + "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", + "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", + "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", + "XAAUDIT.SOLR.IS_ENABLED": "false", + "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", + "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", + "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", + "policy_user": "ambari-qa", + "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true", + "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", + "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", + "XAAUDIT.DB.PASSWORD": 
"{{xa_audit_db_password}}", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", + "SSL_TRUSTSTORE_PASSWORD": "changeit", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", + "REPOSITORY_CONFIG_USERNAME": "hbase", + "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", + "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", + "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", + "REPOSITORY_CONFIG_PASSWORD": "hbase", + "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1" + }, "ganglia-env": { "gmond_user": "nobody", "ganglia_runtime_dir": "/var/run/ganglia/hdp", http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json b/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json index 6a06998..fc798c2 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json +++ b/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json @@ -82,10 +82,28 @@ "content": "\n# Set environment variables here.\n\n# The java implementation to use. Java 1.6 required.\nexport JAVA_HOME={{java64_home}}\n\n# HBase Configuration directory\nexport HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}\n\n# Extra Java CLASSPATH elements. Optional.\nexport HBASE_CLASSPATH=${HBASE_CLASSPATH}\n\n\n# The maximum amount of heap to use, in MB. Default is 1000.\n# export HBASE_HEAPSIZE=1000\n\n# Extra Java runtime options.\n# Below are what we set by default. 
May only work with SUN JVM.\n# For more on why as well as other possible settings,\n# see http://wiki.apache.org/hadoop/PerformanceTuning\nexport SERVER_GC_OPTS=\"-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`\"\n# Uncomment below to enable java garbage collection logging.\n# export HBASE_OPTS=\"$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log\"\n\n# Uncomment and adjust to enable JMX exporting\n# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.\n# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html\n#\n# export HBASE_JMX_BASE=\"-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false\"\n# If you want to configure BucketCache, specify '-XX: MaxDirectMemorySize=' with proper direct memory size\n# export HBASE_THRIFT_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103\"\n# export HBASE_ZOOKEEPER_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104\"\n\n# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.\nexport HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers\n\n# Extra ssh options. Empty by default.\n# export HBASE_SSH_OPTS=\"-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR\"\n\n# Where log files are stored. $HBASE_HOME/logs by default.\nexport HBASE_LOG_DIR={{l og_dir}}\n\n# A string representing this instance of hbase. $USER by default.\n# export HBASE_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes. See 'man nice'.\n# export HBASE_NICENESS=10\n\n# The directory where pid files are stored. /tmp by default.\nexport HBASE_PID_DIR={{pid_dir}}\n\n# Seconds to sleep between slave commands. Unset by default. 
This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HBASE_SLAVE_SLEEP=0.1\n\n# Tell HBase whether it should manage it's own instance of Zookeeper or not.\nexport HBASE_MANAGES_ZK=false\n\n{% if security_enabled %}\nexport HBASE_OPTS=\"$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}}\"\nexport HBASE_MASTER_OPTS=\"$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}}\"\nexport HBASE_REGIONSERV ER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}}\"\n{% else %}\nexport HBASE_OPTS=\"$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log\"\nexport HBASE_MASTER_OPTS=\"$HBASE_MASTER_OPTS -Xmx{{master_heapsize}}\"\nexport HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}\"\n{% endif %}", "hbase_regionserver_heapsize": "1024m", "hbase_log_dir": "/var/log/hbase" - }, - "ranger-hbase-plugin-properties": { - "ranger-hbase-plugin-enabled":"yes" - }, + }, + "core-site": { + "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization", + "proxyuser_group": "users", + "fs.trash.interval": "360", + "ha.failover-controller.active-standby-elector.zk.op.retries": "120", + "hadoop.http.authentication.simple.anonymous.allowed": "true", + "hadoop.security.authentication": "simple", + "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec", + "ipc.client.connection.maxidletime": "30000", + 
"mapreduce.jobtracker.webinterface.trusted": "false", + "hadoop.security.authorization": "false", + "net.topology.script.file.name": "/etc/hadoop/conf/topology_script.py", + "ipc.server.tcpnodelay": "true", + "ipc.client.connect.max.retries": "50", + "hadoop.security.auth_to_local": "\n DEFAULT", + "io.file.buffer.size": "131072", + "hadoop.proxyuser.hdfs.hosts": "*", + "hadoop.proxyuser.hdfs.groups": "*", + "ipc.client.idlethreshold": "8000", + "fs.defaultFS": "hdfs://c6403.org:8020" + }, "cluster-env": { "security_enabled": "false", "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", @@ -104,6 +122,43 @@ "user_group": "hadoop", "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz" + }, + "ranger-hbase-plugin-properties": { + "POLICY_MGR_URL": "{{policymgr_mgr_url}}", + "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", + "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", + "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", + "common.name.for.certificate": "-", + "XAAUDIT.HDFS.IS_ENABLED": "false", + "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", + "ranger-hbase-plugin-enabled": "Yes", + "REPOSITORY_NAME": "{{repo_name}}", + "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", + "XAAUDIT.DB.IS_ENABLED": "true", + "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", + "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", + "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", + "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", + "XAAUDIT.SOLR.IS_ENABLED": "false", + "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", + "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", + "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", + "policy_user": "ambari-qa", + 
"UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true", + "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", + "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", + "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", + "SSL_TRUSTSTORE_PASSWORD": "changeit", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", + "REPOSITORY_CONFIG_USERNAME": "hbase", + "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", + "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", + "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", + "REPOSITORY_CONFIG_PASSWORD": "hbase", + "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1" } }, "commandParams": { http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json b/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json index eca6d3b..3109b7a 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json +++ b/ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json @@ -583,6 +583,43 @@ "oozie_tar_source": "/usr/hdp/current/oozie-client/oozie-sharelib.tar.gz", "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/" + }, + "ranger-hbase-plugin-properties": { + "POLICY_MGR_URL": "{{policymgr_mgr_url}}", + "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", + "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", + "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", + "common.name.for.certificate": "-", + 
"XAAUDIT.HDFS.IS_ENABLED": "false", + "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", + "ranger-hbase-plugin-enabled": "Yes", + "REPOSITORY_NAME": "{{repo_name}}", + "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", + "XAAUDIT.DB.IS_ENABLED": "true", + "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", + "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", + "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", + "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", + "XAAUDIT.SOLR.IS_ENABLED": "false", + "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", + "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", + "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", + "policy_user": "ambari-qa", + "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true", + "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", + "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", + "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", + "SSL_TRUSTSTORE_PASSWORD": "changeit", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", + "REPOSITORY_CONFIG_USERNAME": "hbase", + "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", + "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", + "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", + "REPOSITORY_CONFIG_PASSWORD": "hbase", + "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1" } }, "configurationTags": { http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8b5280/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json index 3f4aad9..6cce47c 100644 --- 
a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json +++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json @@ -624,6 +624,43 @@ }, "oozie-log4j": { "content": "log4jproperties\nline2" + }, + "ranger-hbase-plugin-properties": { + "POLICY_MGR_URL": "{{policymgr_mgr_url}}", + "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", + "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%", + "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", + "common.name.for.certificate": "-", + "XAAUDIT.HDFS.IS_ENABLED": "false", + "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}", + "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log", + "ranger-hbase-plugin-enabled": "Yes", + "REPOSITORY_NAME": "{{repo_name}}", + "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", + "XAAUDIT.DB.IS_ENABLED": "true", + "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600", + "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits", + "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}", + "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}", + "XAAUDIT.SOLR.IS_ENABLED": "false", + "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks", + "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60", + "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}", + "policy_user": "ambari-qa", + "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true", + "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log", + "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400", + "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10", + "SSL_TRUSTSTORE_PASSWORD": "changeit", + "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive", + "REPOSITORY_CONFIG_USERNAME": "hbase", + "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000", + "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}", + 
"XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60", + "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks", + "REPOSITORY_CONFIG_PASSWORD": "hbase", + "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1" } }, "configuration_attributes": {