From: vsairam@apache.org
To: commits@ambari.apache.org
Reply-To: ambari-dev@ambari.apache.org
Date: Mon, 18 Sep 2017 06:56:47 -0000
Subject: [4/5] ambari git commit: AMBARI-21901. Add 0.7.x stack definition for Zeppelin (Prabhjyot Singh via Venkata Sairam)

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py deleted file mode 100644 index 3242f26..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py +++ /dev/null @@ -1,258 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License.
- -""" - -import functools -import os -import re -from resource_management.libraries.functions import StackFeature -from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import get_kinit_path -from resource_management.libraries.functions import stack_select -from resource_management.libraries.functions.default import default -from resource_management.libraries.functions.format import format -from resource_management.libraries.functions.get_stack_version import get_stack_version -from resource_management.libraries.functions.stack_features import check_stack_feature -from resource_management.libraries.functions.version import format_stack_version, get_major_version -from resource_management.libraries.resources.hdfs_resource import HdfsResource -from resource_management.libraries.script.script import Script - -def get_port_from_url(address): - if not (address is None): - return address.split(':')[-1] - else: - return address - -def extract_spark_version(spark_home): - try: - with open(spark_home + "/RELEASE") as fline: - return re.search('Spark (\d\.\d).+', fline.readline().rstrip()).group(1) - except: - pass - return None - - -# server configurations -config = Script.get_config() -stack_root = Script.get_stack_root() - -# e.g. /var/lib/ambari-agent/cache/stacks/HDP/2.2/services/zeppelin-stack/package -service_packagedir = os.path.realpath(__file__).split('/scripts')[0] - -zeppelin_dirname = 'zeppelin-server' - -install_dir = os.path.join(stack_root, "current") -executor_mem = config['configurations']['zeppelin-env']['zeppelin.executor.mem'] -executor_instances = config['configurations']['zeppelin-env'][ - 'zeppelin.executor.instances'] - -security_enabled = config['configurations']['cluster-env']['security_enabled'] - -spark_jar_dir = config['configurations']['zeppelin-env']['zeppelin.spark.jar.dir'] -spark_jar = format("{spark_jar_dir}/zeppelin-spark-0.5.5-SNAPSHOT.jar") -setup_view = True -temp_file = config['configurations']['zeppelin-env']['zeppelin.temp.file'] - -spark_home = "" -spark_version = None -spark2_home = "" -spark2_version = None -if 'spark-defaults' in config['configurations']: - spark_home = os.path.join(stack_root, "current", 'spark-client') - spark_version = extract_spark_version(spark_home) -if 'spark2-defaults' in config['configurations']: - spark2_home = os.path.join(stack_root, "current", 'spark2-client') - spark2_version = extract_spark_version(spark2_home) - -# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade -version = default("/commandParams/version", None) -stack_name = default("/hostLevelParams/stack_name", None) - -# params from zeppelin-config -zeppelin_port = str(config['configurations']['zeppelin-config']['zeppelin.server.port']) -zeppelin_interpreter = None -if 'zeppelin.interpreter.group.order' in config['configurations']['zeppelin-config']: - zeppelin_interpreter = str(config['configurations']['zeppelin-config'] - ['zeppelin.interpreter.group.order']).split(",") - -# params from zeppelin-env -zeppelin_user = config['configurations']['zeppelin-env']['zeppelin_user'] -zeppelin_group = config['configurations']['zeppelin-env']['zeppelin_group'] -zeppelin_log_dir = config['configurations']['zeppelin-env']['zeppelin_log_dir'] -zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir'] -zeppelin_log_file = os.path.join(zeppelin_log_dir, 'zeppelin-setup.log') -zeppelin_hdfs_user_dir = format("/user/{zeppelin_user}") - -zeppelin_dir = 
os.path.join(*[install_dir, zeppelin_dirname]) -conf_dir = "/etc/zeppelin/conf" -external_dependency_conf = "/etc/zeppelin/conf/external-dependency-conf" -notebook_dir = os.path.join(*[install_dir, zeppelin_dirname, 'notebook']) - -# zeppelin-env.sh -zeppelin_env_content = config['configurations']['zeppelin-env']['zeppelin_env_content'] - -# shiro.ini -shiro_ini_content = config['configurations']['zeppelin-shiro-ini']['shiro_ini_content'] - -# log4j.properties -log4j_properties_content = config['configurations']['zeppelin-log4j-properties']['log4j_properties_content'] - -# detect configs -master_configs = config['clusterHostInfo'] -java64_home = config['hostLevelParams']['java_home'] -ambari_host = str(master_configs['ambari_server_host'][0]) -zeppelin_host = str(master_configs['zeppelin_master_hosts'][0]) -ui_ssl_enabled = config['configurations']['zeppelin-config']['zeppelin.ssl'] - -# detect HS2 details, if installed - -hive_server_host = None -hive_metastore_host = '0.0.0.0' -hive_metastore_port = None -hive_server_port = None -hive_zookeeper_quorum = None -hive_server2_support_dynamic_service_discovery = None -is_hive_installed = False -hive_zookeeper_namespace = None -hive_interactive_zookeeper_namespace = None - -if 'hive_server_host' in master_configs and len(master_configs['hive_server_host']) != 0: - is_hive_installed = True - spark_hive_properties = { - 'hive.metastore.uris': default('/configurations/hive-site/hive.metastore.uris', '') - } - hive_server_host = str(master_configs['hive_server_host'][0]) - hive_metastore_host = str(master_configs['hive_metastore_host'][0]) - hive_metastore_port = str( - get_port_from_url(default('/configurations/hive-site/hive.metastore.uris', ''))) - hive_server_port = str(config['configurations']['hive-site']['hive.server2.thrift.http.port']) - hive_zookeeper_quorum = config['configurations']['hive-site']['hive.zookeeper.quorum'] - hive_zookeeper_namespace = config['configurations']['hive-site']['hive.server2.zookeeper.namespace'] - hive_server2_support_dynamic_service_discovery = config['configurations']['hive-site']['hive.server2.support.dynamic.service.discovery'] - -hive_server_interactive_hosts = None -if 'hive_server_interactive_hosts' in master_configs and len(master_configs['hive_server_interactive_hosts']) != 0: - hive_server_interactive_hosts = str(master_configs['hive_server_interactive_hosts'][0]) - hive_interactive_zookeeper_namespace = config['configurations']['hive-interactive-site']['hive.server2.zookeeper.namespace'] - hive_server_port = str(config['configurations']['hive-site']['hive.server2.thrift.http.port']) - hive_zookeeper_quorum = config['configurations']['hive-site']['hive.zookeeper.quorum'] - hive_server2_support_dynamic_service_discovery = config['configurations']['hive-site']['hive.server2.support.dynamic.service.discovery'] - -spark_thrift_server_hosts = None -spark_hive_thrift_port = None -spark_hive_principal = None -if 'spark_thriftserver_hosts' in master_configs and len(master_configs['spark_thriftserver_hosts']) != 0: - spark_thrift_server_hosts = str(master_configs['spark_thriftserver_hosts'][0]) - if config['configurations']['spark-hive-site-override']: - spark_hive_thrift_port = config['configurations']['spark-hive-site-override']['hive.server2.thrift.port'] - if config['configurations']['spark-thrift-sparkconf'] and \ - 'spark.sql.hive.hiveserver2.jdbc.url.principal' in config['configurations']['spark-thrift-sparkconf']: - spark_hive_principal = 
config['configurations']['spark-thrift-sparkconf']['spark.sql.hive.hiveserver2.jdbc.url.principal'] - -spark2_thrift_server_hosts = None -spark2_hive_thrift_port = None -spark2_hive_principal = None -if 'spark2_thriftserver_hosts' in master_configs and len(master_configs['spark2_thriftserver_hosts']) != 0: - spark2_thrift_server_hosts = str(master_configs['spark2_thriftserver_hosts'][0]) - if config['configurations']['spark2-hive-site-override']: - spark2_hive_thrift_port = config['configurations']['spark2-hive-site-override']['hive.server2.thrift.port'] - if config['configurations']['spark2-thrift-sparkconf'] and \ - 'spark.sql.hive.hiveserver2.jdbc.url.principal' in config['configurations']['spark2-thrift-sparkconf']: - spark2_hive_principal = config['configurations']['spark2-thrift-sparkconf']['spark.sql.hive.hiveserver2.jdbc.url.principal'] - - -# detect hbase details if installed -zookeeper_znode_parent = None -hbase_zookeeper_quorum = None -is_hbase_installed = False -if 'hbase_master_hosts' in master_configs and 'hbase-site' in config['configurations']: - is_hbase_installed = True - zookeeper_znode_parent = config['configurations']['hbase-site']['zookeeper.znode.parent'] - hbase_zookeeper_quorum = config['configurations']['hbase-site']['hbase.zookeeper.quorum'] - -# detect spark queue -if 'spark-defaults' in config['configurations'] and 'spark.yarn.queue' in config['configurations']['spark-defaults']: - spark_queue = config['configurations']['spark-defaults']['spark.yarn.queue'] -elif 'spark2-defaults' in config['configurations'] and 'spark.yarn.queue' in config['configurations']['spark2-defaults']: - spark_queue = config['configurations']['spark2-defaults']['spark.yarn.queue'] -else: - spark_queue = 'default' - -zeppelin_kerberos_keytab = config['configurations']['zeppelin-env']['zeppelin.server.kerberos.keytab'] -zeppelin_kerberos_principal = config['configurations']['zeppelin-env']['zeppelin.server.kerberos.principal'] -if 'zeppelin.interpreter.config.upgrade' in config['configurations']['zeppelin-config']: - zeppelin_interpreter_config_upgrade = config['configurations']['zeppelin-config']['zeppelin.interpreter.config.upgrade'] -else: - zeppelin_interpreter_config_upgrade = False - -# e.g. 2.3 -stack_version_unformatted = config['hostLevelParams']['stack_version'] - -# e.g. 2.3.0.0 -stack_version_formatted = format_stack_version(stack_version_unformatted) -major_stack_version = get_major_version(stack_version_formatted) - -# e.g. 
2.3.0.0-2130 -full_stack_version = default("/commandParams/version", None) - -spark_client_version = get_stack_version('spark-client') - -hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", []) -livy_hosts = default("/clusterHostInfo/livy_server_hosts", []) -livy2_hosts = default("/clusterHostInfo/livy2_server_hosts", []) - -livy_livyserver_host = None -livy_livyserver_port = None -livy2_livyserver_host = None -livy2_livyserver_port = None -if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted) and \ - len(livy_hosts) > 0: - livy_livyserver_host = str(livy_hosts[0]) - livy_livyserver_port = config['configurations']['livy-conf']['livy.server.port'] - -if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, stack_version_formatted) and \ - len(livy2_hosts) > 0: - livy2_livyserver_host = str(livy2_hosts[0]) - livy2_livyserver_port = config['configurations']['livy2-conf']['livy.server.port'] - -hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] -security_enabled = config['configurations']['cluster-env']['security_enabled'] -hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] -kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) -hadoop_bin_dir = stack_select.get_hadoop_dir("bin") -hadoop_conf_dir = conf_select.get_hadoop_conf_dir() -hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] -hdfs_site = config['configurations']['hdfs-site'] -default_fs = config['configurations']['core-site']['fs.defaultFS'] - -# create partial functions with common arguments for every HdfsResource call -# to create hdfs directory we need to call params.HdfsResource in code -HdfsResource = functools.partial( - HdfsResource, - user=hdfs_user, - hdfs_resource_ignore_file="/var/lib/ambari-agent/data/.hdfs_resource_ignore", - security_enabled=security_enabled, - keytab=hdfs_user_keytab, - kinit_path_local=kinit_path_local, - hadoop_bin_dir=hadoop_bin_dir, - hadoop_conf_dir=hadoop_conf_dir, - principal_name=hdfs_principal_name, - hdfs_site=hdfs_site, - default_fs=default_fs -) http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/service_check.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/service_check.py deleted file mode 100644 index bd7c855..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/service_check.py +++ /dev/null @@ -1,39 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -""" - -from resource_management.libraries.script.script import Script -from resource_management.libraries.functions.format import format -from resource_management.core.resources.system import Execute - -class ZeppelinServiceCheck(Script): - def service_check(self, env): - import params - env.set_params(params) - - if params.security_enabled: - zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ") - Execute(zeppelin_kinit_cmd, user=params.zeppelin_user) - - scheme = "https" if params.ui_ssl_enabled else "http" - Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k {scheme}://{zeppelin_host}:{zeppelin_port} | grep 200"), - tries = 10, - try_sleep=3, - logoutput=True) - -if __name__ == "__main__": - ZeppelinServiceCheck().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/spark2_config_template.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/spark2_config_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/spark2_config_template.py deleted file mode 100644 index 28a63c6..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/spark2_config_template.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -template = ''' -{ - "id": "2C4U48MY3_spark2", - "name": "spark2", - "group": "spark", - "properties": { - "spark.executor.memory": "", - "args": "", - "zeppelin.spark.printREPLOutput": "true", - "spark.cores.max": "", - "zeppelin.dep.additionalRemoteRepository": "spark-packages,http://dl.bintray.com/spark-packages/maven,false;", - "zeppelin.spark.importImplicit": "true", - "zeppelin.spark.sql.stacktrace": "false", - "zeppelin.spark.concurrentSQL": "false", - "zeppelin.spark.useHiveContext": "true", - "zeppelin.pyspark.python": "python", - "zeppelin.dep.localrepo": "local-repo", - "zeppelin.R.knitr": "true", - "zeppelin.spark.maxResult": "1000", - "master": "local[*]", - "spark.app.name": "Zeppelin", - "zeppelin.R.image.width": "100%", - "zeppelin.R.render.options": "out.format \u003d \u0027html\u0027, comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d F, warning \u003d F", - "zeppelin.R.cmd": "R" - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "spark", - "class": "org.apache.zeppelin.spark.SparkInterpreter", - "defaultInterpreter": true - }, - { - "name": "sql", - "class": "org.apache.zeppelin.spark.SparkSqlInterpreter", - "defaultInterpreter": false - }, - { - "name": "dep", - "class": "org.apache.zeppelin.spark.DepInterpreter", - "defaultInterpreter": false - }, - { - "name": "pyspark", - "class": "org.apache.zeppelin.spark.PySparkInterpreter", - "defaultInterpreter": false - }, - { - "name": "r", - "class": "org.apache.zeppelin.spark.SparkRInterpreter", - "defaultInterpreter": false - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "perNoteSession": false, - "perNoteProcess": false, - "isExistingProcess": false, - "setPermission": false - } -} -''' \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/status_params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/status_params.py deleted file mode 100644 index 35360c6..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/status_params.py +++ /dev/null @@ -1,29 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -Ambari Agent - -""" - -from resource_management.libraries.script import Script - -config = Script.get_config() - -zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir'] -zeppelin_user = config['configurations']['zeppelin-env']['zeppelin_user'] -zeppelin_group = config['configurations']['zeppelin-env']['zeppelin_group'] -zeppelin_log_dir = config['configurations']['zeppelin-env']['zeppelin_log_dir'] http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/templates/input.config-zeppelin.json.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/templates/input.config-zeppelin.json.j2 b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/templates/input.config-zeppelin.json.j2 deleted file mode 100644 index 2b373d5..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/templates/input.config-zeppelin.json.j2 +++ /dev/null @@ -1,48 +0,0 @@ -{# - # Licensed to the Apache Software Foundation (ASF) under one - # or more contributor license agreements. See the NOTICE file - # distributed with this work for additional information - # regarding copyright ownership. The ASF licenses this file - # to you under the Apache License, Version 2.0 (the - # "License"); you may not use this file except in compliance - # with the License. You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
- #} -{ - "input":[ - { - "type":"zeppelin", - "rowtype":"service", - "path":"{{default('/configurations/zeppelin-env/zeppelin_log_dir', '/var/log/zeppelin')}}/zeppelin-zeppelin-*.log" - } - ], - "filter":[ - { - "filter":"grok", - "conditions":{ - "fields":{ - "type":[ - "zeppelin" - ] - } - }, - "log4j_format":"", - "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\])", - "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}\\(\\{{"{"}}%{DATA:thread_name}\\{{"}"}}%{SPACE}%{JAVAFILE:file}\\[%{JAVAMETHOD:method}\\]:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}", - "post_map_values":{ - "logtime":{ - "map_date":{ - "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS" - } - } - } - } - ] -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/quicklinks/quicklinks.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/quicklinks/quicklinks.json b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/quicklinks/quicklinks.json deleted file mode 100644 index c1d8491..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/quicklinks/quicklinks.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "default", - "description": "default quick links configuration", - "configuration": { - "protocol": - { - "type":"https", - "checks":[ - { - "property":"zeppelin.ssl", - "desired":"true", - "site":"zeppelin-config" - } - ] - }, - - "links": [ - { - "name": "zeppelin_ui", - "label": "Zeppelin UI", - "requires_user_name": "false", - "component_name": "ZEPPELIN_MASTER", - "url":"%@://%@:%@/", - "port":{ - "http_property": "zeppelin.server.port", - "http_default_port": "9995", - "https_property": "zeppelin.server.port", - "https_default_port": "9995", - "regex": "^(\\d+)$", - "site": "zeppelin-config" - } - } - ] - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/role_command_order.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/role_command_order.json b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/role_command_order.json deleted file mode 100644 index 3b7d2d0..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/role_command_order.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "general_deps" : { - "_comment" : "dependencies for ZEPPELIN", - "ZEPPELIN_MASTER-START" : ["NAMENODE-START"], - "ZEPPELIN_SERVICE_CHECK-SERVICE_CHECK" : ["ZEPPELIN_MASTER-START"] - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/alerts.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/alerts.json b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/alerts.json deleted file mode 100644 index 8e9b6e7..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/alerts.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "ZEPPELIN": { - "service": [], - "ZEPPELIN_MASTER": [ - { - "name": "zeppelin_server_status", - "label": "Zeppelin Server Status", - 
"description": "This host-level alert is triggered if the Zeppelin server cannot be determined to be up and responding to client requests.", - "interval": 1, - "scope": "ANY", - "source": { - "type": "SCRIPT", - "path": "ZEPPELIN/0.6.0.2.5/package/scripts/alert_check_zeppelin.py" - } - } - ] - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-config.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-config.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-config.xml deleted file mode 100644 index ca6b295..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-config.xml +++ /dev/null @@ -1,208 +0,0 @@ - - - - - - - zeppelin.server.addr - 0.0.0.0 - Server address - - - - zeppelin.server.port - 9995 - Server port.The subsequent port (e.g. 9996) should also be open as it will be - used by the web socket - - - - - zeppelin.server.ssl.port - 9995 - Server ssl port. (used when ssl property is set to true) - - - - - zeppelin.notebook.dir - notebook - notebook persist - - - - zeppelin.notebook.homescreen - - id of notebook to be displayed in homescreen. e.g.) 2A94M5J1Z Empty value - displays default home screen - - - - - zeppelin.notebook.homescreen.hide - false - hide homescreen notebook from list when this value set to true - - - - zeppelin.notebook.s3.user - user - user name for s3 folder structure. If S3 is used to store the notebooks, it is - necessary to use the following folder structure bucketname/username/notebook/ - - - - - zeppelin.notebook.s3.bucket - zeppelin - bucket name for notebook storage. If S3 is used to store the notebooks, it is - necessary to use the following folder structure bucketname/username/notebook/ - - - - - zeppelin.notebook.storage - org.apache.zeppelin.notebook.repo.HdfsNotebookRepo - notebook persistence layer implementation. If S3 is used, set this to - org.apache.zeppelin.notebook.repo.S3NotebookRepo instead. If S3 is used to store the - notebooks, it is necessary to use the following folder structure - bucketname/username/notebook/ - - - - - zeppelin.interpreter.dir - interpreter - Interpreter implementation base directory - - - - zeppelin.interpreters - org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter - Comma separated interpreter configurations. First interpreter become a - default - - - - - zeppelin.interpreter.group.order - spark,angular,jdbc,livy,md,sh - Comma separated interpreter configurations. First interpreter become default - - - - - zeppelin.interpreter.connect.timeout - 30000 - Interpreter process connect timeout in msec. - - - - zeppelin.ssl - false - Should SSL be used by the servers? - - - - zeppelin.ssl.client.auth - false - Should client authentication be used for SSL connections? 
- - - - zeppelin.ssl.keystore.path - conf/keystore - Path to keystore relative to Zeppelin home - - - - zeppelin.ssl.keystore.type - JKS - The format of the given keystore (e.g. JKS or PKCS12) - - - - zeppelin.ssl.keystore.password - change me - Keystore password. Can be obfuscated by the Jetty Password tool - - - - zeppelin.ssl.key.manager.password - change me - Key Manager password. Defaults to keystore password. Can be obfuscated. - - - - - zeppelin.ssl.truststore.path - conf/truststore - Path to truststore relative to Zeppelin home. Defaults to the keystore path - - - - - zeppelin.ssl.truststore.type - JKS - The format of the given truststore (e.g. JKS or PKCS12). Defaults to the same - type as the keystore type - - - - - zeppelin.ssl.truststore.password - change me - Truststore password. Can be obfuscated by the Jetty Password tool. Defaults to - the keystore password - - - - - zeppelin.server.allowed.origins - * - Allowed sources for REST and WebSocket requests (i.e. - http://onehost:8080,http://otherhost.com). If you leave * you are vulnerable to - https://issues.apache.org/jira/browse/ZEPPELIN-173 - - - - - zeppelin.anonymous.allowed - false - Anonymous user allowed by default - - - - zeppelin.notebook.public - false - Make notebook public by default when created, private otherwise - - - - zeppelin.websocket.max.text.message.size - 1024000 - Size in characters of the maximum text message to be received by websocket. Defaults to 1024000 - - - - zeppelin.interpreter.config.upgrade - true - If this is set to true, on every restart of Zeppelin server default interpreter parameters will be reset - - - http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-env.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-env.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-env.xml deleted file mode 100644 index b393def..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-env.xml +++ /dev/null @@ -1,186 +0,0 @@ - - - - - - - - zeppelin_pid_dir - /var/run/zeppelin - Dir containing process ID file - - directory - false - true - - - - - zeppelin_user - zeppelin - USER - User zeppelin daemon runs as - - user - false - - - - - zeppelin_group - zeppelin - GROUP - zeppelin group - - user - false - - - - - zeppelin_log_dir - /var/log/zeppelin - Zeppelin Log dir - - - - zeppelin_env_content - This is the jinja template for zeppelin-env.sh file - -# export JAVA_HOME= -export JAVA_HOME={{java64_home}} -# export MASTER= # Spark master url. eg. spark://master_addr:7077. Leave empty if you want to use local mode. -export MASTER=yarn-client -export SPARK_YARN_JAR={{spark_jar}} -# export ZEPPELIN_JAVA_OPTS # Additional jvm options. for example, export ZEPPELIN_JAVA_OPTS="-Dspark.executor.memory=8g -Dspark.cores.max=16" -# export ZEPPELIN_MEM # Zeppelin jvm mem options Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -# export ZEPPELIN_INTP_MEM # zeppelin interpreter process jvm mem options. Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -# export ZEPPELIN_INTP_JAVA_OPTS # zeppelin interpreter process jvm options. -# export ZEPPELIN_SSL_PORT # ssl port (used when ssl environment variable is set to true) - -# export ZEPPELIN_LOG_DIR # Where log files are stored. PWD by default. 
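The {{...}} tokens in this template, like the export that follows, are Jinja placeholders; master.py (later in this diff) fills them from params via InlineTemplate before writing zeppelin-env.sh. A rough standalone equivalent using the jinja2 package, where the values passed to render() are hypothetical and not taken from this commit:

from jinja2 import Template

env_template = "export JAVA_HOME={{java64_home}}\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}"
# Hypothetical values; Ambari supplies these from params.py at deploy time.
print(Template(env_template).render(java64_home="/usr/jdk64/current",
                                    zeppelin_log_dir="/var/log/zeppelin"))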
-export ZEPPELIN_LOG_DIR={{zeppelin_log_dir}} -# export ZEPPELIN_PID_DIR # The pid files are stored. ${ZEPPELIN_HOME}/run by default. -export ZEPPELIN_PID_DIR={{zeppelin_pid_dir}} -# export ZEPPELIN_WAR_TEMPDIR # The location of jetty temporary directory. -# export ZEPPELIN_NOTEBOOK_DIR # Where notebook saved -# export ZEPPELIN_NOTEBOOK_HOMESCREEN # Id of notebook to be displayed in homescreen. ex) 2A94M5J1Z -# export ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE # hide homescreen notebook from list when this value set to "true". default "false" -# export ZEPPELIN_NOTEBOOK_S3_BUCKET # Bucket where notebook saved -# export ZEPPELIN_NOTEBOOK_S3_ENDPOINT # Endpoint of the bucket -# export ZEPPELIN_NOTEBOOK_S3_USER # User in bucket where notebook saved. For example bucket/user/notebook/2A94M5J1Z/note.json -# export ZEPPELIN_IDENT_STRING # A string representing this instance of zeppelin. $USER by default. -# export ZEPPELIN_NICENESS # The scheduling priority for daemons. Defaults to 0. -# export ZEPPELIN_INTERPRETER_LOCALREPO # Local repository for interpreter's additional dependency loading -# export ZEPPELIN_NOTEBOOK_STORAGE # Refers to pluggable notebook storage class, can have two classes simultaneously with a sync between them (e.g. local and remote). -# export ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC # If there are multiple notebook storages, should we treat the first one as the only source of truth? -# export ZEPPELIN_NOTEBOOK_PUBLIC # Make notebook public by default when created, private otherwise -export ZEPPELIN_INTP_CLASSPATH_OVERRIDES="{{external_dependency_conf}}" -#### Spark interpreter configuration #### - -## Kerberos ticket refresh setting -## -export KINIT_FAIL_THRESHOLD=5 -export KERBEROS_REFRESH_INTERVAL=1d - -## Use provided spark installation ## -## defining SPARK_HOME makes Zeppelin run spark interpreter process using spark-submit -## -# export SPARK_HOME # (required) When it is defined, load it instead of Zeppelin embedded Spark libraries -# export SPARK_HOME={{spark_home}} -# export SPARK_SUBMIT_OPTIONS # (optional) extra options to pass to spark submit. eg) "--driver-memory 512M --executor-memory 1G". -# export SPARK_APP_NAME # (optional) The name of spark application. - -## Use embedded spark binaries ## -## without SPARK_HOME defined, Zeppelin still able to run spark interpreter process using embedded spark binaries. -## however, it is not encouraged when you can define SPARK_HOME -## -# Options read in YARN client mode -# export HADOOP_CONF_DIR # yarn-site.xml is located in configuration directory in HADOOP_CONF_DIR. -export HADOOP_CONF_DIR=/etc/hadoop/conf -# Pyspark (supported with Spark 1.2.1 and above) -# To configure pyspark, you need to set spark distribution's path to 'spark.home' property in Interpreter setting screen in Zeppelin GUI -# export PYSPARK_PYTHON # path to the python command. must be the same path on the driver(Zeppelin) and all workers. -# export PYTHONPATH - -## Spark interpreter options ## -## -# export ZEPPELIN_SPARK_USEHIVECONTEXT # Use HiveContext instead of SQLContext if set true. true by default. -# export ZEPPELIN_SPARK_CONCURRENTSQL # Execute multiple SQL concurrently if set true. false by default. -# export ZEPPELIN_SPARK_IMPORTIMPLICIT # Import implicits, UDF collection, and sql if set true. true by default. -# export ZEPPELIN_SPARK_MAXRESULT # Max number of Spark SQL result to display. 1000 by default. -# export ZEPPELIN_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE # Size in characters of the maximum text message to be received by websocket. 
Defaults to 1024000 - - -#### HBase interpreter configuration #### - -## To connect to HBase running on a cluster, either HBASE_HOME or HBASE_CONF_DIR must be set - -# export HBASE_HOME= # (required) Under which HBase scripts and configuration should be -# export HBASE_CONF_DIR= # (optional) Alternatively, configuration directory can be set to point to the directory that has hbase-site.xml - -# export ZEPPELIN_IMPERSONATE_CMD # Optional, when a user wants to run the interpreter as the end web user. eg) 'sudo -H -u ${ZEPPELIN_IMPERSONATE_USER} bash -c ' - - - - - - zeppelin.executor.mem - 512m - Executor memory to use (e.g. 512m or 1g) - - - - zeppelin.executor.instances - 2 - Number of executor instances to use (e.g. 2) - - - - zeppelin.spark.jar.dir - /apps/zeppelin - Shared location where zeppelin spark jar will be copied to. Should be accessible - by all cluster nodes - - - - - - zeppelin.server.kerberos.principal - - - true - - - Kerberos principal name for Zeppelin. - - KERBEROS_PRINCIPAL - - - - zeppelin.server.kerberos.keytab - - - true - - - Location of the Kerberos keytab file for Zeppelin. - - - - http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-log4j-properties.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-log4j-properties.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-log4j-properties.xml deleted file mode 100644 index bca0091..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-log4j-properties.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - log4j_properties_content - This is the content for log4j.properties file - -log4j.rootLogger = INFO, dailyfile -log4j.appender.stdout = org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout = org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n -log4j.appender.dailyfile.DatePattern=.yyyy-MM-dd -log4j.appender.dailyfile.Threshold = INFO -log4j.appender.dailyfile = org.apache.log4j.DailyRollingFileAppender -log4j.appender.dailyfile.File = ${zeppelin.log.file} -log4j.appender.dailyfile.layout = org.apache.log4j.PatternLayout -log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n - - - - \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-shiro-ini.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-shiro-ini.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-shiro-ini.xml deleted file mode 100644 index 1ff3d9e..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/configuration/zeppelin-shiro-ini.xml +++ /dev/null @@ -1,97 +0,0 @@ - - - - - - shiro_ini_content - This is the jinja template for shiro.ini file - -[users] -# List of users with their password allowed to access Zeppelin. -# To use a different strategy (LDAP / Database / ...) 
check the shiro doc at http://shiro.apache.org/configuration.html#Configuration-INISections -admin = admin, admin -user1 = user1, role1, role2 -user2 = user2, role3 -user3 = user3, role2 - -# Sample LDAP configuration, for user Authentication, currently tested for single Realm -[main] -### A sample for configuring Active Directory Realm -#activeDirectoryRealm = org.apache.zeppelin.realm.ActiveDirectoryGroupRealm -#activeDirectoryRealm.systemUsername = userNameA - -#use either systemPassword or hadoopSecurityCredentialPath, more details in http://zeppelin.apache.org/docs/latest/security/shiroauthentication.html -#activeDirectoryRealm.systemPassword = passwordA -#activeDirectoryRealm.hadoopSecurityCredentialPath = jceks://file/user/zeppelin/zeppelin.jceks -#activeDirectoryRealm.searchBase = CN=Users,DC=SOME_GROUP,DC=COMPANY,DC=COM -#activeDirectoryRealm.url = ldap://ldap.test.com:389 -#activeDirectoryRealm.groupRolesMap = "CN=admin,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"admin","CN=finance,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"finance","CN=hr,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"hr" -#activeDirectoryRealm.authorizationCachingEnabled = false - -### A sample for configuring LDAP Directory Realm -#ldapRealm = org.apache.zeppelin.realm.LdapGroupRealm -## search base for ldap groups (only relevant for LdapGroupRealm): -#ldapRealm.contextFactory.environment[ldap.searchBase] = dc=COMPANY,dc=COM -#ldapRealm.contextFactory.url = ldap://ldap.test.com:389 -#ldapRealm.userDnTemplate = uid={0},ou=Users,dc=COMPANY,dc=COM -#ldapRealm.contextFactory.authenticationMechanism = SIMPLE - -### A sample PAM configuration -#pamRealm=org.apache.zeppelin.realm.PamRealm -#pamRealm.service=sshd - - -sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager -### If caching of user is required then uncomment below lines -cacheManager = org.apache.shiro.cache.MemoryConstrainedCacheManager -securityManager.cacheManager = $cacheManager - -cookie = org.apache.shiro.web.servlet.SimpleCookie -cookie.name = JSESSIONID -#Uncomment the line below when running Zeppelin-Server in HTTPS mode -#cookie.secure = true -cookie.httpOnly = true -sessionManager.sessionIdCookie = $cookie - -securityManager.sessionManager = $sessionManager -# 86,400,000 milliseconds = 24 hours -securityManager.sessionManager.globalSessionTimeout = 86400000 -shiro.loginUrl = /api/login - -[roles] -role1 = * -role2 = * -role3 = * -admin = * - -[urls] -# This section is used for url-based security. -# You can secure interpreter, configuration and credential information by urls. Comment or uncomment the below urls that you want to hide. -# anon means the access is anonymous. 
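The [urls] section that begins here maps URL patterns to Shiro filter chains (the mapping is completed just below: /api/version stays anonymous, everything else requires form login). A small illustrative Python 3 check of such a mapping, with abbreviated sample content inlined rather than the full shiro.ini:

import configparser

sample = """
[urls]
/api/version = anon
/** = authc
"""
parser = configparser.ConfigParser()
parser.read_string(sample)
for pattern, filter_chain in parser["urls"].items():
    # Prints e.g. "/api/version -> anon" and "/** -> authc".
    print(pattern, "->", filter_chain)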
-# authc means Form based Auth Security -# To enforce security, comment the line below and uncomment the next one -/api/version = anon -#/api/interpreter/** = authc, roles[admin] -#/api/configurations/** = authc, roles[admin] -#/api/credential/** = authc, roles[admin] -#/** = anon -/** = authc - - - - \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json deleted file mode 100644 index b605c9d..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "services": [ - { - "name": "ZEPPELIN", - "identities": [ - { - "name": "/smokeuser" - }, - { - "name": "zeppelin_user", - "principal": { - "value": "${zeppelin-env/zeppelin_user}${principal_suffix}@${realm}", - "type" : "user", - "configuration": "zeppelin-env/zeppelin.server.kerberos.principal", - "local_username" : "${zeppelin-env/zeppelin_user}" - }, - "keytab": { - "file": "${keytab_dir}/zeppelin.server.kerberos.keytab", - "owner": { - "name": "${zeppelin-env/zeppelin_user}", - "access": "r" - }, - "group": { - "name": "${cluster-env/user_group}", - "access": "" - }, - "configuration": "zeppelin-env/zeppelin.server.kerberos.keytab" - } - } - ], - "components": [ - { - "name": "ZEPPELIN_MASTER" - } - ], - "configurations": [ - { - "zeppelin-env": { - "zeppelin.kerberos.enabled": "true" - } - }, - { - "core-site": { - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*" - } - } - ] - } - ] -} http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/metainfo.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/metainfo.xml deleted file mode 100644 index 7f2f570..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/metainfo.xml +++ /dev/null @@ -1,111 +0,0 @@ - - - - 2.0 - - - ZEPPELIN - Zeppelin Notebook - A web-based notebook that enables interactive data analytics. It enables you to - make beautiful data-driven, interactive and collaborative documents with SQL, Scala - and more. 
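The ${config-type/property} references in the kerberos.json hunk above (for example ${zeppelin-env/zeppelin_user}) are expanded by Ambari against the cluster configuration before the descriptor is applied. A simplified sketch of that substitution; Ambari's real resolver also handles tokens such as ${realm} and ${principal_suffix}, which this deliberately ignores:

import re

def resolve(value, config):
    # Expand ${config-type/property} tokens against a nested config dict.
    return re.sub(r"\$\{([^}/]+)/([^}]+)\}",
                  lambda m: config[m.group(1)][m.group(2)], value)

cfg = {"zeppelin-env": {"zeppelin_user": "zeppelin"}}
print(resolve("hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts", cfg))
# -> hadoop.proxyuser.zeppelin.hosts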
- - 0.6.0.3.0 - - - ZEPPELIN_MASTER - Zeppelin Notebook - MASTER - 1 - true - - - PYTHON - 10000 - - - - SPARK/SPARK_CLIENT - host - - true - - - - YARN/YARN_CLIENT - host - - true - - - - - - zeppelin - true - - - - - - - - redhat7,amazon2015,redhat6,suse11,suse12 - - - zeppelin_${stack_version} - - - - - debian7,ubuntu12,ubuntu14,ubuntu16 - - - zeppelin-${stack_version} - - - - - - - - PYTHON - 300 - - - - HDFS - - - - zeppelin-config - zeppelin-env - zeppelin-shiro-ini - zeppelin-log4j-properties - - true - - - - quicklinks.json - true - - - - - http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/alert_check_zeppelin.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/alert_check_zeppelin.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/alert_check_zeppelin.py deleted file mode 100644 index e6d7a91..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/alert_check_zeppelin.py +++ /dev/null @@ -1,47 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -import glob -import sys - -from resource_management.core.exceptions import ComponentIsNotRunning -from resource_management.libraries.functions.check_process_status import check_process_status -from resource_management.libraries.script import Script - -reload(sys) -sys.setdefaultencoding('utf8') -config = Script.get_config() - -zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir'] - -RESULT_CODE_OK = 'OK' -RESULT_CODE_CRITICAL = 'CRITICAL' -RESULT_CODE_UNKNOWN = 'UNKNOWN' - - -def execute(configurations={}, parameters={}, host_name=None): - try: - pid_file = glob.glob(zeppelin_pid_dir + '/zeppelin-*.pid')[0] - check_process_status(pid_file) - except ComponentIsNotRunning as ex: - return (RESULT_CODE_CRITICAL, [str(ex)]) - except: - return (RESULT_CODE_CRITICAL, ["Zeppelin is not running"]) - - return (RESULT_CODE_OK, ["Successful connection to Zeppelin"]) http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/livy2_config_template.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/livy2_config_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/livy2_config_template.py deleted file mode 100644 index 71d3817..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/livy2_config_template.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -template = ''' -{ - "id": "2C8A4SZ9T_livy2", - "status": "READY", - "group": "livy", - "name": "livy2", - "properties": { - "zeppelin.livy.keytab": "", - "zeppelin.livy.spark.sql.maxResult": "1000", - "livy.spark.executor.instances": "", - "livy.spark.executor.memory": "", - "livy.spark.dynamicAllocation.enabled": "", - "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "", - "livy.spark.dynamicAllocation.initialExecutors": "", - "zeppelin.livy.session.create_timeout": "120", - "livy.spark.driver.memory": "", - "zeppelin.livy.displayAppInfo": "false", - "livy.spark.jars.packages": "", - "livy.spark.dynamicAllocation.maxExecutors": "", - "zeppelin.livy.concurrentSQL": "false", - "zeppelin.livy.principal": "", - "livy.spark.executor.cores": "", - "zeppelin.livy.url": "http://localhost:8998", - "zeppelin.livy.pull_status.interval.millis": "1000", - "livy.spark.driver.cores": "", - "livy.spark.dynamicAllocation.minExecutors": "" - }, - "interpreterGroup": [ - { - "class": "org.apache.zeppelin.livy.LivySparkInterpreter", - "editor": { - "editOnDblClick": false, - "language": "scala" - }, - "name": "spark", - "defaultInterpreter": false - }, - { - "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter", - "editor": { - "editOnDblClick": false, - "language": "sql" - }, - "name": "sql", - "defaultInterpreter": false - }, - { - "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter", - "editor": { - "editOnDblClick": false, - "language": "python" - }, - "name": "pyspark", - "defaultInterpreter": false - }, - { - "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter", - "editor": { - "editOnDblClick": false, - "language": "python" - }, - "name": "pyspark3", - "defaultInterpreter": false - }, - { - "class": "org.apache.zeppelin.livy.LivySparkRInterpreter", - "editor": { - "editOnDblClick": false, - "language": "r" - }, - "name": "sparkr", - "defaultInterpreter": false - } - ], - "dependencies": [], - "option": { - "setPermission": false, - "remote": true, - "users": [], - "isExistingProcess": false, - "perUser": "scoped", - "isUserImpersonate": false, - "perNote": "shared", - "port": -1 - } -} -''' http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py deleted file mode 100644 index ab798ec..0000000 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py +++ /dev/null @@ -1,443 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -import glob -import os -from resource_management.core.base import Fail -from resource_management.core.resources import Directory -from resource_management.core.resources.system import Execute, File -from resource_management.core.source import InlineTemplate -from resource_management.core import sudo -from resource_management.core.logger import Logger -from resource_management.core.source import StaticFile -from resource_management.libraries import XmlConfig -from resource_management.libraries.functions.check_process_status import check_process_status -from resource_management.libraries.functions.format import format -from resource_management.libraries.functions import stack_select -from resource_management.libraries.functions import StackFeature -from resource_management.libraries.functions.decorator import retry -from resource_management.libraries.functions.stack_features import check_stack_feature -from resource_management.libraries.functions.version import format_stack_version -from resource_management.libraries.script.script import Script - -class Master(Script): - - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - - self.create_zeppelin_log_dir(env) - - if params.spark_version: - Execute('echo spark_version:' + str(params.spark_version) + ' detected for spark_home: ' - + params.spark_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user) - if params.spark2_version: - Execute('echo spark2_version:' + str(params.spark2_version) + ' detected for spark2_home: ' - + params.spark2_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user) - - def create_zeppelin_dir(self, params): - params.HdfsResource(format("/user/{zeppelin_user}"), - type="directory", - action="create_on_execute", - owner=params.zeppelin_user, - recursive_chown=True, - recursive_chmod=True - ) - params.HdfsResource(format("/user/{zeppelin_user}/test"), - type="directory", - action="create_on_execute", - owner=params.zeppelin_user, - recursive_chown=True, - recursive_chmod=True - ) - params.HdfsResource(format("/apps/zeppelin"), - type="directory", - action="create_on_execute", - owner=params.zeppelin_user, - recursive_chown=True, - recursive_chmod=True - ) - - spark_deps_full_path = self.get_zeppelin_spark_dependencies()[0] - spark_dep_file_name = os.path.basename(spark_deps_full_path) - - params.HdfsResource(params.spark_jar_dir + "/" + spark_dep_file_name, - type="file", - action="create_on_execute", - source=spark_deps_full_path, - group=params.zeppelin_group, - owner=params.zeppelin_user, - mode=0444, - replace_existing_files=True, - ) - - params.HdfsResource(None, action="execute") - - def create_zeppelin_log_dir(self, env): - import params - env.set_params(params) - Directory([params.zeppelin_log_dir], - owner=params.zeppelin_user, - group=params.zeppelin_group, - cd_access="a", - create_parents=True, - mode=0755 - ) - - def create_zeppelin_hdfs_conf_dir(self, env): - import params - env.set_params(params) - Directory([params.external_dependency_conf], - owner=params.zeppelin_user, - group=params.zeppelin_group, - cd_access="a", - create_parents=True, - mode=0755 - ) - - def chown_zeppelin_pid_dir(self, env): - import params - env.set_params(params) - Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"), params.zeppelin_pid_dir), - sudo=True) - - def configure(self, env): - import params - import status_params - env.set_params(params) - env.set_params(status_params) - self.create_zeppelin_log_dir(env) - 
- # create the pid and zeppelin dirs - Directory([params.zeppelin_pid_dir, params.zeppelin_dir], - owner=params.zeppelin_user, - group=params.zeppelin_group, - cd_access="a", - create_parents=True, - mode=0755 - ) - self.chown_zeppelin_pid_dir(env) - - # write out zeppelin-site.xml - XmlConfig("zeppelin-site.xml", - conf_dir=params.conf_dir, - configurations=params.config['configurations']['zeppelin-config'], - owner=params.zeppelin_user, - group=params.zeppelin_group - ) - # write out zeppelin-env.sh - env_content = InlineTemplate(params.zeppelin_env_content) - File(format("{params.conf_dir}/zeppelin-env.sh"), content=env_content, - owner=params.zeppelin_user, group=params.zeppelin_group) - - # write out shiro.ini - shiro_ini_content = InlineTemplate(params.shiro_ini_content) - File(format("{params.conf_dir}/shiro.ini"), content=shiro_ini_content, - owner=params.zeppelin_user, group=params.zeppelin_group) - - # write out log4j.properties - File(format("{params.conf_dir}/log4j.properties"), content=params.log4j_properties_content, - owner=params.zeppelin_user, group=params.zeppelin_group) - - self.create_zeppelin_hdfs_conf_dir(env) - - if len(params.hbase_master_hosts) > 0 and params.is_hbase_installed: - # copy hbase-site.xml - XmlConfig("hbase-site.xml", - conf_dir=params.external_dependency_conf, - configurations=params.config['configurations']['hbase-site'], - configuration_attributes=params.config['configuration_attributes']['hbase-site'], - owner=params.zeppelin_user, - group=params.zeppelin_group, - mode=0644) - - XmlConfig("hdfs-site.xml", - conf_dir=params.external_dependency_conf, - configurations=params.config['configurations']['hdfs-site'], - configuration_attributes=params.config['configuration_attributes']['hdfs-site'], - owner=params.zeppelin_user, - group=params.zeppelin_group, - mode=0644) - - XmlConfig("core-site.xml", - conf_dir=params.external_dependency_conf, - configurations=params.config['configurations']['core-site'], - configuration_attributes=params.config['configuration_attributes']['core-site'], - owner=params.zeppelin_user, - group=params.zeppelin_group, - mode=0644) - - def stop(self, env, upgrade_type=None): - import params - self.create_zeppelin_log_dir(env) - self.chown_zeppelin_pid_dir(env) - Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh stop >> ' + params.zeppelin_log_file, - user=params.zeppelin_user) - - def start(self, env, upgrade_type=None): - import params - import status_params - self.configure(env) - - Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"), "/etc/zeppelin"), - sudo=True) - Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"), - os.path.join(params.zeppelin_dir, "notebook")), sudo=True) - - if params.security_enabled: - zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ") - Execute(zeppelin_kinit_cmd, user=params.zeppelin_user) - - zeppelin_spark_dependencies = self.get_zeppelin_spark_dependencies() - if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]): - self.create_zeppelin_dir(params) - - # if first_setup: - if not glob.glob(params.conf_dir + "/interpreter.json") and \ - not os.path.exists(params.conf_dir + "/interpreter.json"): - Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh start >> ' - + params.zeppelin_log_file, user=params.zeppelin_user) - self.check_zeppelin_server() - self.update_zeppelin_interpreter() - - self.update_kerberos_properties() - - 
-    Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh restart >> '
-            + params.zeppelin_log_file, user=params.zeppelin_user)
-    pidfile = glob.glob(os.path.join(status_params.zeppelin_pid_dir,
-                                     'zeppelin-' + params.zeppelin_user + '*.pid'))[0]
-    Logger.info(format("Pid file is: {pidfile}"))
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-
-    try:
-      pid_file = glob.glob(status_params.zeppelin_pid_dir + '/zeppelin-' +
-                           status_params.zeppelin_user + '*.pid')[0]
-    except IndexError:
-      # no pid file yet; check_process_status will raise ComponentIsNotRunning
-      pid_file = ''
-    check_process_status(pid_file)
-
-  def get_interpreter_settings(self):
-    import params
-    import json
-
-    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
-    config_content = sudo.read_file(interpreter_config)
-    config_data = json.loads(config_content)
-    return config_data
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    Logger.info("Executing Stack Upgrade pre-restart")
-    import params
-    env.set_params(params)
-
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
-      stack_select.select_packages(params.version)
-
-  def set_interpreter_settings(self, config_data):
-    import params
-    import json
-
-    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
-    File(interpreter_config,
-         group=params.zeppelin_group,
-         owner=params.zeppelin_user,
-         content=json.dumps(config_data, indent=2)
-         )
-
-  def update_kerberos_properties(self):
-    import params
-    config_data = self.get_interpreter_settings()
-    interpreter_settings = config_data['interpreterSettings']
-    for interpreter_setting in interpreter_settings:
-      interpreter = interpreter_settings[interpreter_setting]
-      if interpreter['group'] == 'livy' and params.livy_livyserver_host:
-        if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
-          interpreter['properties']['zeppelin.livy.principal'] = params.zeppelin_kerberos_principal
-          interpreter['properties']['zeppelin.livy.keytab'] = params.zeppelin_kerberos_keytab
-        else:
-          interpreter['properties']['zeppelin.livy.principal'] = ""
-          interpreter['properties']['zeppelin.livy.keytab'] = ""
-      elif interpreter['group'] == 'spark':
-        if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
-          interpreter['properties']['spark.yarn.principal'] = params.zeppelin_kerberos_principal
-          interpreter['properties']['spark.yarn.keytab'] = params.zeppelin_kerberos_keytab
-        else:
-          interpreter['properties']['spark.yarn.principal'] = ""
-          interpreter['properties']['spark.yarn.keytab'] = ""
-      elif interpreter['group'] == 'jdbc':
-        if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
-          interpreter['properties']['zeppelin.jdbc.auth.type'] = "KERBEROS"
-          interpreter['properties']['zeppelin.jdbc.principal'] = params.zeppelin_kerberos_principal
-          interpreter['properties']['zeppelin.jdbc.keytab.location'] = params.zeppelin_kerberos_keytab
-          if params.zookeeper_znode_parent \
-              and params.hbase_zookeeper_quorum \
-              and params.zookeeper_znode_parent not in interpreter['properties']['phoenix.url']:
-            interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
-                                                       params.hbase_zookeeper_quorum + ':' + \
-                                                       params.zookeeper_znode_parent
-        else:
-          interpreter['properties']['zeppelin.jdbc.auth.type'] = ""
-          interpreter['properties']['zeppelin.jdbc.principal'] = ""
-          interpreter['properties']['zeppelin.jdbc.keytab.location'] = ""
-      elif interpreter['group'] == 'sh':
-        if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
-          interpreter['properties']['zeppelin.shell.auth.type'] = "KERBEROS"
-          interpreter['properties']['zeppelin.shell.principal'] = params.zeppelin_kerberos_principal
-          interpreter['properties']['zeppelin.shell.keytab.location'] = params.zeppelin_kerberos_keytab
-        else:
-          interpreter['properties']['zeppelin.shell.auth.type'] = ""
-          interpreter['properties']['zeppelin.shell.principal'] = ""
-          interpreter['properties']['zeppelin.shell.keytab.location'] = ""
-
-    self.set_interpreter_settings(config_data)
-
-  def update_zeppelin_interpreter(self):
-    import params
-    config_data = self.get_interpreter_settings()
-    interpreter_settings = config_data['interpreterSettings']
-
-    if 'spark2-defaults' in params.config['configurations']:
-      spark2_config = self.get_spark2_interpreter_config()
-      config_id = spark2_config["id"]
-      interpreter_settings[config_id] = spark2_config
-
-    if params.livy2_livyserver_host:
-      livy2_config = self.get_livy2_interpreter_config()
-      config_id = livy2_config["id"]
-      interpreter_settings[config_id] = livy2_config
-
-    if params.zeppelin_interpreter:
-      settings_to_delete = []
-      for settings_key, interpreter in interpreter_settings.items():
-        if interpreter['group'] not in params.zeppelin_interpreter:
-          settings_to_delete.append(settings_key)
-
-      for key in settings_to_delete:
-        del interpreter_settings[key]
-
-    hive_interactive_properties_key = 'hive_interactive'
-    # keys() returns a list copy in Python 2, so entries can safely be
-    # deleted from interpreter_settings while iterating below
-    for setting_key in interpreter_settings.keys():
-      interpreter = interpreter_settings[setting_key]
-      if interpreter['group'] == 'jdbc':
-        interpreter['dependencies'] = []
-
-        if not params.hive_server_host and params.hive_server_interactive_hosts:
-          hive_interactive_properties_key = 'hive'
-
-        if params.hive_server_host:
-          interpreter['properties']['hive.driver'] = 'org.apache.hive.jdbc.HiveDriver'
-          interpreter['properties']['hive.user'] = 'hive'
-          interpreter['properties']['hive.password'] = ''
-          if params.hive_server2_support_dynamic_service_discovery:
-            interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
-                params.hive_zookeeper_quorum + \
-                '/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=' + \
-                params.hive_zookeeper_namespace
-          else:
-            interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
-                params.hive_server_host + ':' + params.hive_server_port
-        if params.hive_server_interactive_hosts:
-          interpreter['properties'][hive_interactive_properties_key + '.driver'] = 'org.apache.hive.jdbc.HiveDriver'
-          interpreter['properties'][hive_interactive_properties_key + '.user'] = 'hive'
-          interpreter['properties'][hive_interactive_properties_key + '.password'] = ''
-          if params.hive_server2_support_dynamic_service_discovery:
-            interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
-                params.hive_zookeeper_quorum + \
-                '/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=' + \
-                params.hive_interactive_zookeeper_namespace
-          else:
-            interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
-                params.hive_server_interactive_hosts + ':' + params.hive_server_port
-
-        if params.zookeeper_znode_parent \
-            and params.hbase_zookeeper_quorum:
-          interpreter['properties']['phoenix.driver'] = 'org.apache.phoenix.jdbc.PhoenixDriver'
-          interpreter['properties']['phoenix.hbase.client.retries.number'] = '1'
-          interpreter['properties']['phoenix.user'] = 'phoenixuser'
-          interpreter['properties']['phoenix.password'] = ''
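-          # build the Phoenix JDBC URL from the HBase ZooKeeper quorum and znode parent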
-          interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
-              params.hbase_zookeeper_quorum + ':' + \
-              params.zookeeper_znode_parent
-
-      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy':
-        if params.livy_livyserver_host:
-          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy_livyserver_host + \
-                                                           ":" + params.livy_livyserver_port
-        else:
-          del interpreter_settings[setting_key]
-
-      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2':
-        if params.livy2_livyserver_host:
-          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy2_livyserver_host + \
-                                                           ":" + params.livy2_livyserver_port
-        else:
-          del interpreter_settings[setting_key]
-
-      elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark':
-        if 'spark-env' in params.config['configurations']:
-          interpreter['properties']['master'] = "yarn-client"
-          interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark-client/"
-        else:
-          del interpreter_settings[setting_key]
-
-      elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark2':
-        if 'spark2-env' in params.config['configurations']:
-          interpreter['properties']['master'] = "yarn-client"
-          interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark2-client/"
-        else:
-          del interpreter_settings[setting_key]
-
-    self.set_interpreter_settings(config_data)
-
-  @retry(times=30, sleep_time=5, err_class=Fail)
-  def check_zeppelin_server(self):
-    import params
-    path = params.conf_dir + "/interpreter.json"
-    if os.path.exists(path) and os.path.getsize(path):
-      Logger.info("interpreter.json found. Zeppelin server started.")
-    else:
-      raise Fail("interpreter.json not found; waiting for the Zeppelin server to start...")
-
-  def get_zeppelin_spark_dependencies(self):
-    import params
-    return glob.glob(params.zeppelin_dir + '/interpreter/spark/dep/zeppelin-spark-dependencies*.jar')
-
-  def get_spark2_interpreter_config(self):
-    import spark2_config_template
-    import json
-
-    return json.loads(spark2_config_template.template)
-
-  def get_livy2_interpreter_config(self):
-    import livy2_config_template
-    import json
-
-    return json.loads(livy2_config_template.template)
-
-if __name__ == "__main__":
-  Master().execute()
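----------------------------------------------------------------------

Note on the interpreter handling above: the update_* methods all follow the
same read-modify-write cycle against Zeppelin's interpreter.json. A minimal
standalone sketch of that cycle follows; the config path and the
principal/keytab values are illustrative placeholders, not values from this
patch, and the real script reads the file via sudo and writes it back through
Ambari's File resource.

    import json

    # illustrative path; the deleted script derives it from params.conf_dir
    INTERPRETER_JSON = "/etc/zeppelin/conf/interpreter.json"

    def set_livy_principal(principal, keytab):
      # read: Zeppelin keeps every interpreter's settings in one JSON document
      with open(INTERPRETER_JSON) as f:
        config_data = json.load(f)

      # modify: interpreterSettings is a dict keyed by generated ids, so
      # entries are matched on their 'group' field rather than their key
      for setting in config_data['interpreterSettings'].values():
        if setting['group'] == 'livy':
          setting['properties']['zeppelin.livy.principal'] = principal
          setting['properties']['zeppelin.livy.keytab'] = keytab

      # write: serialize back with the same 2-space indent the script uses
      with open(INTERPRETER_JSON, 'w') as f:
        json.dump(config_data, f, indent=2)

    set_livy_principal("zeppelin@EXAMPLE.COM",
                       "/etc/security/keytabs/zeppelin.keytab")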