Return-Path: X-Original-To: archive-asf-public-internal@cust-asf2.ponee.io Delivered-To: archive-asf-public-internal@cust-asf2.ponee.io Received: from cust-asf.ponee.io (cust-asf.ponee.io [163.172.22.183]) by cust-asf2.ponee.io (Postfix) with ESMTP id 945A2200CB5 for ; Wed, 28 Jun 2017 02:24:05 +0200 (CEST) Received: by cust-asf.ponee.io (Postfix) id 93062160BFE; Wed, 28 Jun 2017 00:24:05 +0000 (UTC) Delivered-To: archive-asf-public@cust-asf.ponee.io Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by cust-asf.ponee.io (Postfix) with SMTP id 4DEBA160C0B for ; Wed, 28 Jun 2017 02:24:03 +0200 (CEST) Received: (qmail 88194 invoked by uid 500); 28 Jun 2017 00:24:02 -0000 Mailing-List: contact commits-help@ambari.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: ambari-dev@ambari.apache.org Delivered-To: mailing list commits@ambari.apache.org Received: (qmail 87985 invoked by uid 99); 28 Jun 2017 00:24:02 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Wed, 28 Jun 2017 00:24:02 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id 85A02F324D; Wed, 28 Jun 2017 00:23:59 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: alejandro@apache.org To: commits@ambari.apache.org Date: Wed, 28 Jun 2017 00:24:18 -0000 Message-Id: <8c30386139634ebcbe031f68e32aacac@git.apache.org> In-Reply-To: <452d25062466449ebd977f81b4bcc9cc@git.apache.org> References: <452d25062466449ebd977f81b4bcc9cc@git.apache.org> X-Mailer: ASF-Git Admin Mailer Subject: [20/51] [partial] ambari git commit: AMBARI-21349. 
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Ambari Agent

Linux parameters for the PIG service scripts (BigInsights 4.0 stack).
This module is evaluated once per agent command; every module-level name
is part of the public "params" surface read by pig.py / service_check.py.
"""

import functools

from resource_management.libraries.script.script import Script
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path

# Command configuration pushed from the Ambari server.
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

stack_name = default("/hostLevelParams/stack_name", None)

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
iop_stack_version = format_stack_version(stack_version_unformatted)

# Target stack version; only populated during the RESTART of a Rolling Upgrade.
version = default("/commandParams/version", None)

# Hadoop locations resolved through the stack-selector tooling.
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
hadoop_home = stack_select.get_hadoop_dir("home")

# Pig client layout (IOP 2.2+ style paths).
pig_conf_dir = "/usr/iop/current/pig-client/conf"
pig_bin_dir = '/usr/iop/current/pig-client/bin'

# Shorthand for the configurations sub-tree; internal helper only.
_configs = config['configurations']

hdfs_user = _configs['hadoop-env']['hdfs_user']
hdfs_principal_name = _configs['hadoop-env']['hdfs_principal_name']
hdfs_user_keytab = _configs['hadoop-env']['hdfs_user_keytab']

smokeuser = _configs['cluster-env']['smokeuser']
smokeuser_principal = _configs['cluster-env']['smokeuser_principal_name']
smoke_user_keytab = _configs['cluster-env']['smokeuser_keytab']
user_group = _configs['cluster-env']['user_group']
security_enabled = _configs['cluster-env']['security_enabled']
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))

pig_env_sh_template = _configs['pig-env']['content']

# Not supporting 32 bit jdk.
java64_home = config['hostLevelParams']['java_home']

pig_properties = _configs['pig-properties']['content']
log4j_props = _configs['pig-log4j']['content']

hdfs_site = _configs['hdfs-site']
default_fs = _configs['core-site']['fs.defaultFS']

# Partially-applied HdfsResource carrying the arguments common to every call;
# callers only supply the path / type / action specifics.
HdfsResource = functools.partial(
    HdfsResource,
    user=hdfs_user,
    security_enabled=security_enabled,
    keytab=hdfs_user_keytab,
    kinit_path_local=kinit_path_local,
    hadoop_bin_dir=hadoop_bin_dir,
    hadoop_conf_dir=hadoop_conf_dir,
    principal_name=hdfs_principal_name,
    hdfs_site=hdfs_site,
    default_fs=default_fs
)
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Ambari Agent + +""" +import os +from resource_management import * +from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl + +@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) +def pig(): + import params + + Directory( params.pig_conf_dir, + create_parents = True, + owner = params.hdfs_user, + group = params.user_group + ) + + File(format("{pig_conf_dir}/pig-env.sh"), + owner=params.hdfs_user, + mode=0755, + content=InlineTemplate(params.pig_env_sh_template) + ) + + # pig_properties is always set to a default even if it's not in the payload + File(format("{params.pig_conf_dir}/pig.properties"), + mode=0644, + group=params.user_group, + owner=params.hdfs_user, + content=params.pig_properties + ) + + if (params.log4j_props != None): + File(format("{params.pig_conf_dir}/log4j.properties"), + mode=0644, + group=params.user_group, + owner=params.hdfs_user, + content=params.log4j_props + ) + elif (os.path.exists(format("{params.pig_conf_dir}/log4j.properties"))): + File(format("{params.pig_conf_dir}/log4j.properties"), + mode=0644, + group=params.user_group, + owner=params.hdfs_user + ) http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/PIG/package/scripts/pig_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/PIG/package/scripts/pig_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/PIG/package/scripts/pig_client.py new file mode 100755 index 
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Ambari Agent

"""

import sys
import os
from resource_management import *
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from pig import pig

from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl


class PigClient(Script):
  """Base control script for the Pig client component."""

  def configure(self, env):
    """Render the Pig client configuration via the shared pig() routine."""
    import params
    env.set_params(params)
    pig()

  def status(self, env):
    # Pig is a client-only component: there is no daemon to report on.
    raise ClientComponentHasNoStatus()


@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class PigClientLinux(PigClient):
  """Linux implementation with stack-upgrade awareness."""

  def get_component_name(self):
    return "hadoop-client"

  def pre_upgrade_restart(self, env, upgrade_type=None):
    """Repoint conf/stack symlinks at the new version during a rolling upgrade."""
    import params
    env.set_params(params)

    upgrading_to_4_plus = (
      params.version
      and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0)

    if upgrading_to_4_plus:
      conf_select.select(params.stack_name, "pig", params.version)
      conf_select.select(params.stack_name, "hadoop", params.version)
      stack_select.select("hadoop-client", params.version)  # includes pig-client

  def install(self, env):
    self.install_packages(env)
    self.configure(env)


if __name__ == "__main__":
  PigClient().execute()
+ +Ambari Agent + +""" + +import os + +from resource_management.core.resources.system import Execute, File +from resource_management.core.source import InlineTemplate, StaticFile +from resource_management.libraries.functions.copy_tarball import copy_to_hdfs +from resource_management.libraries.functions.format import format +from resource_management.libraries.functions.version import compare_versions +from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop +from resource_management.libraries.resources.hdfs_resource import HdfsResource +from resource_management.libraries.script.script import Script + +from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl + +class PigServiceCheck(Script): + pass + +@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT) +class PigServiceCheckLinux(PigServiceCheck): + def service_check(self, env): + import params + env.set_params(params) + + input_file = format('/user/{smokeuser}/passwd') + output_dir = format('/user/{smokeuser}/pigsmoke.out') + + # cleanup output + params.HdfsResource(output_dir, + type="directory", + action="delete_on_execute", + owner=params.smokeuser, + ) + # re-create input. 
Be able to delete it if it already exists + params.HdfsResource(input_file, + type="file", + source="/etc/passwd", + action="create_on_execute", + owner=params.smokeuser, + ) + params.HdfsResource(None, action="execute") + + + + File( format("{tmp_dir}/pigSmoke.sh"), + content = StaticFile("pigSmoke.sh"), + mode = 0755 + ) + + # check for Pig-on-M/R + Execute( format("pig {tmp_dir}/pigSmoke.sh"), + tries = 3, + try_sleep = 5, + path = format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'), + user = params.smokeuser, + logoutput = True + ) + + test_cmd = format("fs -test -e {output_dir}") + ExecuteHadoop( test_cmd, + user = params.smokeuser, + conf_dir = params.hadoop_conf_dir, + bin_dir = params.hadoop_bin_dir + ) + + if params.iop_stack_version != "" and compare_versions(params.iop_stack_version, '4.0') >= 0: + # cleanup results from previous test + # cleanup output + params.HdfsResource(output_dir, + type="directory", + action="delete_on_execute", + owner=params.smokeuser, + ) + # re-create input. 
Be able to delete it firstly if it already exists + params.HdfsResource(input_file, + type="file", + source="/etc/passwd", + action="create_on_execute", + owner=params.smokeuser, + ) + params.HdfsResource(None, action="execute") + + if params.security_enabled: + kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};") + Execute(kinit_cmd, + user=params.smokeuser + ) + + Execute(format("pig {tmp_dir}/pigSmoke.sh"), + tries = 3, + try_sleep = 5, + path = format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'), + user = params.smokeuser, + logoutput = True + ) + + ExecuteHadoop(test_cmd, + user = params.smokeuser, + conf_dir = params.hadoop_conf_dir, + bin_dir = params.hadoop_bin_dir + ) + +if __name__ == "__main__": + PigServiceCheck().execute() \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-client.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-client.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-client.xml new file mode 100755 index 0000000..fdeceae --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-client.xml @@ -0,0 +1,60 @@ + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-env.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-env.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-env.xml new file mode 100755 index 0000000..d5bab25 --- /dev/null 
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-env.xml @@ -0,0 +1,43 @@ + + + + + + + + content + This is the jinja template for slider-env.sh file + +# Set Slider-specific environment variables here. + +# The only required environment variable is JAVA_HOME. All others are +# optional. When running a distributed configuration it is best to +# set JAVA_HOME in this file, so that it is correctly defined on +# remote nodes. + +# The java implementation to use. Required. +export JAVA_HOME={{java64_home}} +# The hadoop conf directory. Optional as slider-client.xml can be edited to add properties. +export HADOOP_CONF_DIR={{hadoop_conf_dir}} + + + + \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-log4j.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-log4j.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-log4j.xml new file mode 100755 index 0000000..709867c --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/configuration/slider-log4j.xml @@ -0,0 +1,89 @@ + + + + + + + + content + Custom log4j.properties + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Define some default values that can be overridden by system properties +log4j.rootLogger=INFO,stdout +log4j.threshhold=ALL +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout + +# log layout skips stack-trace creation operations by avoiding line numbers and method +log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} - %m%n + +# debug edition is much more expensive +#log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} (%F:%M(%L)) - %m%n + + +log4j.appender.subprocess=org.apache.log4j.ConsoleAppender +log4j.appender.subprocess.layout=org.apache.log4j.PatternLayout +log4j.appender.subprocess.layout.ConversionPattern=[%c{1}]: %m%n +#log4j.logger.org.apache.slider.yarn.appmaster.SliderAppMasterer.master=INFO,subprocess + +# for debugging Slider +#log4j.logger.org.apache.slider=DEBUG +#log4j.logger.org.apache.slider=DEBUG + +# uncomment to debug service lifecycle issues +#log4j.logger.org.apache.hadoop.yarn.service.launcher=DEBUG +#log4j.logger.org.apache.hadoop.yarn.service=DEBUG + +# uncomment for YARN operations +#log4j.logger.org.apache.hadoop.yarn.client=DEBUG + +# uncomment this to debug security problems +#log4j.logger.org.apache.hadoop.security=DEBUG + +#crank back on some noise +log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR +log4j.logger.org.apache.hadoop.hdfs=WARN + + +log4j.logger.org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor=WARN 
+log4j.logger.org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl=WARN +log4j.logger.org.apache.zookeeper=WARN + + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/metainfo.xml new file mode 100755 index 0000000..6e1addf --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/metainfo.xml @@ -0,0 +1,135 @@ + + + + 2.0 + + + SLIDER + Slider + A framework for deploying, managing and monitoring existing distributed applications on YARN + 0.80.0 + + + SLIDER + Slider + CLIENT + 0+ + true + + + HDFS/HDFS_CLIENT + host + + true + + + + YARN/YARN_CLIENT + host + + true + + + + + + PYTHON + 1200 + + + + xml + slider-client.xml + slider-client + + + xml + core-site.xml + core-site + + + xml + hdfs-site.xml + hdfs-site + + + xml + yarn-site.xml + yarn-site + + + env + slider-env.sh + slider-env + + + env + log4j.properties + slider-log4j + + + + + + + redhat6,suse11 + + + slider* + + + + + + ubuntu12 + + + slider* + + + + + + + + + PYTHON + 300 + + + + YARN + HDFS + ZOOKEEPER + + + + slider-log4j + slider-client + slider-env + + + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/files/hbaseSmokeVerify.sh ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/files/hbaseSmokeVerify.sh b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/files/hbaseSmokeVerify.sh new file mode 100755 index 0000000..19276f3 --- /dev/null +++ 
#!/usr/bin/env bash
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# Verify the HBase smoke table: scan 'ambarismoketest' and confirm the smoke
# row landed.  Usage: hbaseSmokeVerify.sh <conf_dir> <expected_data> <hbase_cmd>
conf_dir="$1"
data="$2"
hbase_cmd="$3"
echo "scan 'ambarismoketest'" | "$hbase_cmd" --config "$conf_dir" shell > /tmp/hbase_chk_verify
cat /tmp/hbase_chk_verify
echo "Looking for $data"
# BUGFIX: awk exits 0 whether or not the /id/,/date/ range matches, so the
# original `if [ "$?" -ne 0 ]` test could never fail.  Test the range output
# for non-emptiness instead so a missing row actually fails the check.
range_out=$(awk '/id/,/date/' /tmp/hbase_chk_verify)
echo "$range_out"
if [ -z "$range_out" ]
then
  exit 1
fi

# Script exit status: did the scan report exactly one row?
grep -q '1 row(s)' /tmp/hbase_chk_verify
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Parameters for the SLIDER service scripts (BigInsights 4.0 stack).
Evaluated once per agent command; module-level names are the public
"params" surface read by slider.py / slider_client.py / service_check.py.
"""

from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions.version import format_stack_version, compare_versions
from resource_management.libraries.functions.default import default
from resource_management import *

# Command configuration pushed from the Ambari server.
config = Script.get_config()

stack_name = default("/hostLevelParams/stack_name", None)

# Target stack version; only populated during the RESTART of a Rolling Upgrade.
version = default("/commandParams/version", None)

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
stack_version = format_stack_version(stack_version_unformatted)

# Slider client layout (IOP paths).
slider_home_dir = '/usr/iop/current/slider-client'
slider_bin_dir = '/usr/iop/current/slider-client/bin'
slider_conf_dir = "/usr/iop/current/slider-client/conf"
slider_cmd = format("{slider_bin_dir}/slider")

# Hadoop params.
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()

_cluster_env = config['configurations']['cluster-env']
smokeuser = _cluster_env['smokeuser']
smokeuser_principal = _cluster_env['smokeuser_principal_name']
security_enabled = _cluster_env['security_enabled']
smokeuser_keytab = _cluster_env['smokeuser_keytab']
kinit_path_local = functions.get_kinit_path()

slider_env_sh_template = config['configurations']['slider-env']['content']

java64_home = config['hostLevelParams']['java_home']
log4j_props = config['configurations']['slider-log4j']['content']

# storm_slider_conf_dir = '/usr/iop/current/storm-slider-client/conf'  # disabled
b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/service_check.py new file mode 100755 index 0000000..9c45d1c --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/service_check.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

"""

from resource_management import *


class SliderServiceCheck(Script):
  """Smoke test: list Slider applications as the smoke user."""

  def service_check(self, env):
    import params
    env.set_params(params)

    # Prefix the command with a kinit on secure clusters only.
    if params.security_enabled:
      smokeuser_kinit_cmd = format(
        "{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal};")
    else:
      smokeuser_kinit_cmd = ""

    Execute(format("{smokeuser_kinit_cmd} {slider_cmd} list"),
            tries=3,
            try_sleep=5,
            user=params.smokeuser,
            logoutput=True
    )


if __name__ == "__main__":
  SliderServiceCheck().execute()
+ +Ambari Agent + +""" +import os +from resource_management import * + +def slider(): + import params + + Directory(params.slider_conf_dir, + create_parents=True + ) + + slider_client_config = params.config['configurations']['slider-client'] if 'configurations' in params.config and 'slider-client' in params.config['configurations'] else {} + + XmlConfig("slider-client.xml", + conf_dir=params.slider_conf_dir, + configurations=slider_client_config + ) + + File(format("{slider_conf_dir}/slider-env.sh"), + mode=0755, + content=InlineTemplate(params.slider_env_sh_template) + ) + """ + Directory(params.storm_slider_conf_dir, + create_parents=True + ) + + File(format("{storm_slider_conf_dir}/storm-slider-env.sh"), + mode=0755, + content=Template('storm-slider-env.sh.j2') + ) + """ + if (params.log4j_props != None): + File(format("{params.slider_conf_dir}/log4j.properties"), + mode=0644, + content=params.log4j_props + ) + elif (os.path.exists(format("{params.slider_conf_dir}/log4j.properties"))): + File(format("{params.slider_conf_dir}/log4j.properties"), + mode=0644 + ) http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py new file mode 100755 index 0000000..11e3cd3 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. 
The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * +from resource_management.libraries.functions import conf_select +from resource_management.libraries.functions import stack_select +from slider import slider + + +class SliderClient(Script): + + def get_component_name(self): + return "slider-client" + + def pre_upgrade_restart(self, env, upgrade_type=None): + import params + env.set_params(params) + + if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0: + conf_select.select(params.stack_name, "slider", params.version) + stack_select.select("slider-client", params.version) + #Execute(format("stack-select set slider-client {version}")) + + # also set all of the hadoop clients since slider client is upgraded as + # part of the final "CLIENTS" group and we need to ensure that + # hadoop-client is also set + conf_select.select(params.stack_name, "hadoop", params.version) + stack_select.select("hadoop-client", params.version) + #Execute(format("stack-select set hadoop-client {version}")) + + def install(self, env): + self.install_packages(env) + self.configure(env) + + def configure(self, env): + import params + env.set_params(params) + slider() + + def status(self, env): + raise ClientComponentHasNoStatus() + + +if __name__ == "__main__": + SliderClient().execute() 
http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/templates/storm-slider-env.sh.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/templates/storm-slider-env.sh.j2 b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/templates/storm-slider-env.sh.j2 new file mode 100755 index 0000000..8022a4b --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/templates/storm-slider-env.sh.j2 @@ -0,0 +1,38 @@ +{# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#} +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#/* +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. 
You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ +export JAVA_HOME={{java64_home}} +export SLIDER_HOME={{slider_home_dir}} http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-env.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-env.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-env.xml new file mode 100755 index 0000000..03e88c9 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-env.xml @@ -0,0 +1,208 @@ + + + + + solr_user + User to run Solr as + USER + solr + + + + solr_lib_dir + /var/lib/solr + Directory for writable Solr files and index data + + + + solr_pid_dir + /var/run/solr + + + + solr_log_dir + /var/log/solr + + + + solr_port + 8983 + Sets the port Solr binds to, default is 8983 + + + + solr_hdfs_home_dir + /apps/solr/data + A root location in HDFS for Solr to write collection data to. Rather than specifying an HDFS location for the data directory or update log directory, use this to specify one root location and have everything automatically created within this HDFS + + + + + ZOOKEEPER_CHROOT + /solr + If you're using a ZooKeeper instance that is shared by other systems, it's recommended to isolate the SolrCloud znode tree using ZooKeeper's chroot support. 
+ For instance, to ensure all znodes created by SolrCloud are stored under /solr, you can put /solr on the end of your ZK_HOST connection string, such as: ZK_HOST=zk1,zk2,zk3/solr + + + + content + This is the jinja template for solr.in.sh file + + +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +export JAVA_HOME={{java64_home}} + +# By default the script will use JAVA_HOME to determine which java +# to use, but you can set a specific path for Solr to use without +# affecting other Java applications on your server/workstation. 
+#SOLR_JAVA_HOME="" + + + +# Increase Java Min/Max Heap as needed to support your indexing / query needs +SOLR_JAVA_MEM="-Xms512m -Xmx512m" + + +# Enable verbose GC logging +GC_LOG_OPTS="-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \ +-XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime" + + +# These GC settings have shown to work well for a number of common Solr workloads +GC_TUNE="-XX:NewRatio=3 \ +-XX:SurvivorRatio=4 \ +-XX:TargetSurvivorRatio=90 \ +-XX:MaxTenuringThreshold=8 \ +-XX:+UseConcMarkSweepGC \ +-XX:+UseParNewGC \ +-XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \ +-XX:+CMSScavengeBeforeRemark \ +-XX:PretenureSizeThreshold=64m \ +-XX:+UseCMSInitiatingOccupancyOnly \ +-XX:CMSInitiatingOccupancyFraction=50 \ +-XX:CMSMaxAbortablePrecleanTime=6000 \ +-XX:+CMSParallelRemarkEnabled \ +-XX:+ParallelRefProcEnabled \ +-XX:MaxDirectMemorySize=20g" + + +# Set the ZooKeeper connection string if using an external ZooKeeper ensemble +# e.g. host1:2181,host2:2181/chroot +# Leave empty if not using SolrCloud +#ZK_HOST="" + + + +# Set the ZooKeeper client timeout (for SolrCloud mode) +#ZK_CLIENT_TIMEOUT="15000" + + +# By default the start script uses "localhost"; override the hostname here +# for production SolrCloud environments to control the hostname exposed to cluster state +#SOLR_HOST="192.168.1.1" + +# By default the start script uses UTC; override the timezone if needed +#SOLR_TIMEZONE="UTC" + +# Set to true to activate the JMX RMI connector to allow remote JMX client applications +# to monitor the JVM hosting Solr; set to "false" to disable that behavior +# (false is recommended in production environments) +ENABLE_REMOTE_JMX_OPTS="false" + + +# The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here +# RMI_PORT=18983 + + +# Anything you add to the SOLR_OPTS variable will be included in the java +# start command line as-is, in ADDITION to other options. 
If you specify the +# -a option on start script, those options will be appended as well. Examples: +#SOLR_OPTS="$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=3000" +#SOLR_OPTS="$SOLR_OPTS -Dsolr.autoCommit.maxTime=60000" +#SOLR_OPTS="$SOLR_OPTS -Dsolr.clustering.enabled=true" + + + +# Location where the bin/solr script will save PID files for running instances +# If not set, the script will create PID files in $SOLR_TIP/bin +#SOLR_PID_DIR= + + + +# Path to a directory where Solr creates index files, the specified directory +# must contain a solr.xml; by default, Solr will use server/solr +#SOLR_HOME= + + + +# Solr provides a default Log4J configuration properties file in server/resources +# however, you may want to customize the log settings and file appender location +# so you can point the script to use a different log4j.properties file +#LOG4J_PROPS=/var/solr/log4j.properties + + + +# Location where Solr should write logs to; should agree with the file appender +# settings in server/resources/log4j.properties +#SOLR_LOGS_DIR= + + + +# Sets the port Solr binds to, default is 8983 +#SOLR_PORT=8983 + + + +# Uncomment to set SSL-related system properties +# Be sure to update the paths to the correct keystore for your environment +#SOLR_SSL_OPTS="-Djavax.net.ssl.keyStore=etc/solr-ssl.keystore.jks \ +#-Djavax.net.ssl.keyStorePassword=secret \ +#-Djavax.net.ssl.trustStore=etc/solr-ssl.keystore.jks \ +#-Djavax.net.ssl.trustStorePassword=secret" + + + +# Uncomment to set a specific SSL port (-Djetty.ssl.port=N); if not set +# and you are using SSL, then the start script will use SOLR_PORT for the SSL port +#SOLR_SSL_PORT= + + +SOLR_PID_DIR={{pid_dir}} +SOLR_HOME={{lib_dir}}/data +LOG4J_PROPS={{solr_conf_dir}}/log4j.properties +SOLR_LOGS_DIR={{log_dir}} +SOLR_PORT={{solr_port}} +SOLR_MODE=solrcloud +ZK_HOST={{zookeeper_hosts_list}}{{zookeeper_chroot}} +SOLR_HOST={{hostname}} + +# Comment out the following SOLR_OPTS setting to config Solr to write its index and transaction log 
files to local filesystem. +# Data (index and transaction log files) exists on HDFS will not be moved to local filesystem, +# after you change this config, they will not be available from local filesystem. +SOLR_OPTS="-Dsolr.directoryFactory=HdfsDirectoryFactory \ +-Dsolr.lock.type=hdfs \ +-Dsolr.hdfs.confdir=/etc/hadoop/conf \ +-Dsolr.hdfs.home={{fs_root}}{{solr_hdfs_home_dir}} \ +-Dsolr.hdfs.security.kerberos.enabled={{sole_kerberos_enabled}} \ +-Dsolr.hdfs.security.kerberos.keytabfile={{solr_keytab}} \ +-Dsolr.hdfs.security.kerberos.principal={{solr_principal}}" + + + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-log4j.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-log4j.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-log4j.xml new file mode 100755 index 0000000..abbf9d2 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-log4j.xml @@ -0,0 +1,82 @@ + + + + + + + + content + Custom log4j.properties + +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# + +# +# Solr Logging Configuration +# + +# Logging level +solr.log=${solr.solr.home}/../logs +log4j.rootLogger=INFO, file, CONSOLE + +log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender + +log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout +log4j.appender.CONSOLE.layout.ConversionPattern=%-4r [%t] %-5p %c %x [%X{collection} %X{shard} %X{replica} %X{core}] \u2013 %m%n + +#- size rotation with log cleanup. +log4j.appender.file=org.apache.log4j.RollingFileAppender +log4j.appender.file.MaxFileSize=4MB +log4j.appender.file.MaxBackupIndex=9 + +#- File to log to and log format +log4j.appender.file.File=${solr.log}/solr.log +log4j.appender.file.layout=org.apache.log4j.PatternLayout +log4j.appender.file.layout.ConversionPattern=%-5p - %d{yyyy-MM-dd HH:mm:ss.SSS}; [%X{collection} %X{shard} %X{replica} %X{core}] %C; %m\n + +log4j.logger.org.apache.zookeeper=WARN +log4j.logger.org.apache.hadoop=WARN + +# set to INFO to enable infostream log messages +log4j.logger.org.apache.solr.update.LoggingInfoStream=OFF + + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-site.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-site.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-site.xml new file mode 100755 index 0000000..e36e633 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/configuration/solr-site.xml @@ -0,0 +1,44 @@ + + + + + + solr.hdfs.security.kerberos.enabled + false + Set to true to enable Kerberos authentication + + + + solr.hdfs.security.kerberos.keytabfile + /etc/security/keytabs/solr.service.keytab + A keytab file contains pairs of 
Kerberos principals and encrypted keys which allows for password-less authentication when Solr attempts to authenticate with secure Hadoop. + This file will need to be present on all Solr servers at the same path provided in this parameter. + + + + + solr.hdfs.security.kerberos.principal + solr/_HOST@EXAMPLE.COM + The Kerberos principal that Solr should use to authenticate to secure Hadoop; the format of a typical Kerberos V5 principal is: primary/instance@realm + + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/kerberos.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/kerberos.json b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/kerberos.json new file mode 100755 index 0000000..cd46977 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/kerberos.json @@ -0,0 +1,47 @@ +{ + "services": [ + { + "name": "SOLR", + "identities": [ + { + "name": "/smokeuser" + }, + ], + "configurations": [ + { + "solr-site": { + "solr.hdfs.security.kerberos.enabled":"true" + } + } + ], + "components": [ + { + "name": "SOLR", + "identities": [ + { + "name": "solr", + "principal": { + "value": "${solr-env/solr_user}/_HOST@${realm}", + "type": "service", + "configuration": "solr-site/solr.hdfs.security.kerberos.principal", + "local_username": "${solr-env/solr_user}" + }, + "keytab": { + "file": "${keytab_dir}/solr.service.keytab", + "owner": { + "name": "${solr-env/solr_user}", + "access": "r" + }, + "group": { + "name": "${cluster-env/user_group}", + "access": "" + }, + "configuration": "solr-site/solr.hdfs.security.kerberos.keytabfile" + } + } + ] + } + ] + } + ] +} http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/metainfo.xml 
---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/metainfo.xml new file mode 100755 index 0000000..f472263 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/metainfo.xml @@ -0,0 +1,74 @@ + + + + 2.0 + + + SOLR + Solr + Solr is the popular, blazing fast open source enterprise search platform from the Apache Lucene project + + 5.1.0 + + + + SOLR + Solr + MASTER + 1+ + + + PYTHON + 600 + + + + xml + solr-site.xml + solr-site + + + + + + + any + + + solr_4_1_* + + + + + + + + PYTHON + 300 + + + + HDFS + + + + solr-log4j + + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/__init__.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/__init__.py new file mode 100755 index 0000000..5561e10 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/__init__.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py new file mode 100755 index 0000000..d5d90f6 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py @@ -0,0 +1,182 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
Ambari Agent

"""

from resource_management import *
from resource_management.libraries import Script
from resource_management.libraries.functions import conf_select, stack_select
from resource_management.libraries.resources import HdfsResource
import status_params

# Server configurations: the full command JSON Ambari sends to this agent.
config = Script.get_config()
stack_name = default("/hostLevelParams/stack_name", None)
security_enabled = config['configurations']['cluster-env']['security_enabled']

# Sorted so the rendered ZooKeeper connect string is stable across agents.
zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts']
zookeeper_hosts.sort()
zookeeper_hosts_list=','.join(zookeeper_hosts)

java64_home = config['hostLevelParams']['java_home']

# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade.
# Version being upgraded/downgraded to.
# It cannot be used during the initial Cluster Install because the version is not yet known.
version = default("/commandParams/version", None)

# Current host stack version (None on a fresh install).
current_version = default("/hostLevelParams/current_version", None)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
iop_stack_version = format_stack_version(stack_version_unformatted)

# Upgrade direction (Direction.UPGRADE / Direction.DOWNGRADE), or None
# outside of an upgrade.
upgrade_direction = default("/commandParams/upgrade_direction", None)

solr_user=config['configurations']['solr-env']['solr_user']
user_group=config['configurations']['cluster-env']['user_group']
hostname = config['hostname']
solr_server_hosts = config['clusterHostInfo']['solr_hosts']
# First Solr host is the one probed by the service check.
solr_server_host = solr_server_hosts[0]

fs_root = config['configurations']['core-site']['fs.defaultFS']

solr_home = '/usr/iop/current/solr-server'
solr_conf_dir='/usr/iop/current/solr-server/conf'
solr_conf = solr_conf_dir

solr_piddir = status_params.solr_pid_dir
solr_pidfile = status_params.solr_pid_file

if "solr-env" in config['configurations']:
  #solr_hosts = config['clusterHostInfo']['solr_hosts']
  solr_znode = default('/configurations/solr-env/solr_znode', '/solr')
  solr_min_mem = default('/configurations/solr-env/solr_minmem', 1024)
  solr_max_mem = default('/configurations/solr-env/solr_maxmem', 2048)
  #solr_instance_count = len(config['clusterHostInfo']['solr_hosts'])
  solr_datadir = default('/configurations/solr-env/solr_datadir', '/opt/solr/data')
  #solr_data_resources_dir = os.path.join(solr_datadir, 'resources')
  solr_jmx_port = default('/configurations/solr-env/solr_jmx_port', 18983)
  # NOTE(review): this key is missing the leading '/' that every other
  # default() lookup here has, so it likely always yields False — confirm
  # against the default() implementation before relying on it.
  solr_ssl_enabled = default('configurations/solr-env/solr_ssl_enabled', False)
  solr_keystore_location = default('/configurations/solr-env/solr_keystore_location', '/etc/security/serverKeys/solr.keyStore.jks')
  solr_keystore_password = default('/configurations/solr-env/solr_keystore_password', 'bigdata')
  solr_keystore_type = default('/configurations/solr-env/solr_keystore_type', 'jks')
  #solr_truststore_location = config['configurations']['solr-env']['solr_truststore_location']
  #solr_truststore_password = config['configurations']['solr-env']['solr_truststore_password']
  #solr_truststore_type = config['configurations']['solr-env']['solr_truststore_type']
  #solr_user = config['configurations']['solr-env']['solr_user']
  solr_log_dir = config['configurations']['solr-env']['solr_log_dir']
  solr_log = format("{solr_log_dir}/solr-install.log")
  #solr_env_content = config['configurations']['solr-env']['content']
  solr_hdfs_home_dir = config['configurations']['solr-env']['solr_hdfs_home_dir']

zookeeper_port = default('/configurations/zoo.cfg/clientPort', None)
# Build the comma separated host:port list of ZooKeeper servers from
# clusterHostInfo (no trailing comma after the last entry).
index = 0
zookeeper_quorum = ""
for host in config['clusterHostInfo']['zookeeper_hosts']:
  zookeeper_quorum += host + ":" + str(zookeeper_port)
  index += 1
  if index < len(config['clusterHostInfo']['zookeeper_hosts']):
    zookeeper_quorum += ","

# lib_dir / solr_data_dir selection depends on the installed stack version
# and, during an upgrade, on the direction and target version. The two key
# names differ only in an underscore: solr_data_dir (4.2+) vs solr_datadir
# (4.1) — be careful when editing.
if compare_versions(format_stack_version(current_version), '4.2.0.0') >= 0:
  if upgrade_direction is not None and upgrade_direction == Direction.DOWNGRADE and version is not None and compare_versions(format_stack_version(version), '4.2.0.0') < 0:
    # Downgrading below 4.2: fall back to the old lib dir key.
    lib_dir=default("/configurations/solr-env/solr_lib_dir", None)
  else:
    lib_dir=default("/configurations/solr-env/solr_data_dir", None)
  solr_data_dir=default("/configurations/solr-env/solr_data_dir", None)
else: #IOP 4.1
  if upgrade_direction is not None and upgrade_direction == Direction.UPGRADE:
    # Mid-upgrade from 4.1: prefer the new key, fall back to the 4.1 key.
    solr_data_dir=default("/configurations/solr-env/solr_data_dir", None)
    if not solr_data_dir:
      solr_data_dir=default("/configurations/solr-env/solr_datadir", None)
    lib_dir=default("/configurations/solr-env/solr_data_dir", None)
    if not lib_dir:
      lib_dir=default("/configurations/solr-env/solr_datadir", None)
    old_lib_dir=default("/configurations/solr-env/solr_lib_dir", None)
  else:
    solr_data_dir=default("/configurations/solr-env/solr_lib_dir", None)
    lib_dir=default("/configurations/solr-env/solr_lib_dir", None)
log_dir=config['configurations']['solr-env']['solr_log_dir']
pid_dir=config['configurations']['solr-env']['solr_pid_dir']
solr_port=config['configurations']['solr-env']['solr_port']

zookeeper_chroot=config['configurations']['solr-env']['ZOOKEEPER_CHROOT']

# Copy so the _HOST substitution below does not mutate the shared config dict.
solr_site = dict(config['configurations']['solr-site'])
solr_principal = solr_site['solr.hdfs.security.kerberos.principal']

if security_enabled:
  solr_principal = solr_principal.replace('_HOST',hostname)
  solr_site['solr.hdfs.security.kerberos.principal']=solr_principal

# Kerberos. NOTE: "sole_kerberos_enabled" is a historical typo, but the
# solr-env template references {{sole_kerberos_enabled}} by this exact name,
# so it must stay spelled this way.
sole_kerberos_enabled=config['configurations']['solr-site']['solr.hdfs.security.kerberos.enabled']
solr_keytab=config['configurations']['solr-site']['solr.hdfs.security.kerberos.keytabfile']

# log4j.properties content rendered verbatim into the conf dir.
log4j_props = config['configurations']['solr-log4j']['content']

# Jinja template for solr.in.sh (solr-env "content" property).
solr_in_sh_template = config['configurations']['solr-env']['content']

solr_pid_file = status_params.solr_pid_file

solr_hdfs_home_dir = config['configurations']['solr-env']['solr_hdfs_home_dir']
solr_hdfs_user_mode = 0775

smokeuser = config['configurations']['cluster-env']['smokeuser']
smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']

hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

if security_enabled:
  _hostname_lowercase = config['hostname'].lower()
  solr_jaas_file = solr_conf + '/solr_jaas.conf'
  solr_kerberos_keytab = solr_keytab
  solr_kerberos_principal = solr_principal #cannot use the one from solr-env, otherwise default @EXAMPLE.COM is in the real value
  solr_web_kerberos_keytab = default('/configurations/solr-env/solr_web_kerberos_keytab', None)
  solr_web_kerberos_principal = default('/configurations/solr-env/solr_web_kerberos_principal', None)
  if solr_web_kerberos_principal:
    solr_web_kerberos_principal = solr_web_kerberos_principal.replace('_HOST',_hostname_lowercase)
  solr_kerberos_name_rules = default('/configurations/solr-env/solr_kerberos_name_rules', 'DEFAULT')

kinit_path_local = get_kinit_path()
import functools
#create partial functions with common arguments for every HdfsDirectory call
#to create hdfs directory we need to call params.HdfsDirectory in code
HdfsResource = functools.partial(
  HdfsResource,
  user=hdfs_user,
  security_enabled = security_enabled,
  keytab = hdfs_user_keytab,
  kinit_path_local = kinit_path_local,
  hadoop_bin_dir = hadoop_bin_dir,
  hadoop_conf_dir = hadoop_conf_dir,
  principal_name = hdfs_principal_name,
  hdfs_site = hdfs_site,
  default_fs = default_fs
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/service_check.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/service_check.py new file mode 100755 index 0000000..d3add2f --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/service_check.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
"""


from resource_management import *
from resource_management.libraries.functions.validate import call_and_match_output
import subprocess
import time

class SolrServiceCheck(Script):
  def service_check(self, env):
    """Smoke test Solr: create an example collection, verify it is listed,
    then delete it — all as the smoke user (with kinit when secured)."""
    import params
    env.set_params(params)

    # Base URL of the first Solr server; used for the collections API below.
    url = "http://" + params.solr_server_host + ":" + str(params.solr_port) + "/solr/"

    if params.security_enabled:
      # format() resolves placeholders from local/param names in scope.
      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
      Execute(kinit_cmd,
              user = params.smokeuser,
              logoutput = True
      )

    # SOLR_INCLUDE points the solr CLI at the Ambari-managed solr.in.sh.
    create_collection_cmd = format("SOLR_INCLUDE={solr_conf_dir}/solr.in.sh solr create -c smokeuser_ExampleCollection -s 2 -d data_driven_schema_configs")
    Logger.info("Creating solr collection from example: %s" % create_collection_cmd)
    Execute(create_collection_cmd,
            user = params.smokeuser,
            logoutput = True)

    # --negotiate -u : lets curl use the smoke user's Kerberos ticket when
    # security is enabled; harmless on unsecured clusters.
    list_collection_cmd = "su " + params.smokeuser + " -c 'curl -s --negotiate -u : " + url + "admin/collections?action=list'"
    list_collection_output = "smokeuser_ExampleCollection"
    Logger.info("List Collections: %s" % list_collection_cmd)
    # Fails the check unless the collection name appears in the API response.
    call_and_match_output(list_collection_cmd, format("({list_collection_output})"), "Failed to create collection \"smokeuser_ExampleCollection\" or check that collection exists")

    # Clean up so repeated service checks start from a blank slate.
    delete_collection_cmd = format("SOLR_INCLUDE={solr_conf_dir}/solr.in.sh solr delete -c smokeuser_ExampleCollection")
    Logger.info("Deleting solr collection : %s" % delete_collection_cmd)
    Execute(delete_collection_cmd,
            user = params.smokeuser,
            logoutput=True
    )

if __name__ == "__main__":
  SolrServiceCheck().execute()
b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr.py new file mode 100755 index 0000000..ae4a101 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +from resource_management import * +from resource_management.libraries.functions import conf_select +import sys +import os + +def solr(type = None, upgrade_type=None): + import params + + if type == 'server': + effective_version = params.iop_stack_version if upgrade_type is None else format_stack_version(params.version) + + params.HdfsResource(params.solr_hdfs_home_dir, + type="directory", + action="create_on_execute", + owner=params.solr_user, + mode=params.solr_hdfs_user_mode + ) + params.HdfsResource(None, action="execute") + + Directory([params.log_dir,params.pid_dir,params.solr_conf_dir], + mode=0755, + cd_access='a', + owner=params.solr_user, + create_parents=True, + group=params.user_group + ) + + XmlConfig("solr-site.xml", + conf_dir=params.solr_conf_dir, + configurations=params.solr_site, + configuration_attributes=params.config['configuration_attributes']['solr-site'], + owner=params.solr_user, + group=params.user_group, + mode=0644 + ) + + File(format("{solr_conf_dir}/solr.in.sh"), + content=InlineTemplate(params.solr_in_sh_template), + owner=params.solr_user, + group=params.user_group + ) + + File(format("{solr_conf_dir}/log4j.properties"), + mode=0644, + group=params.user_group, + owner=params.solr_user, + content=params.log4j_props + ) + + File(format("{solr_conf_dir}/log4j.properties"), + mode=0644, + group=params.user_group, + owner=params.solr_user, + content=params.log4j_props + ) + + Directory(params.lib_dir, + mode=0755, + cd_access='a', + owner=params.solr_user, + create_parents=True, + group=params.user_group + ) + + if effective_version is not None and effective_version != "" and compare_versions(effective_version, '4.2.0.0') >= 0: + File(format("{lib_dir}/solr.xml"), + mode=0644, + group=params.user_group, + owner=params.solr_user, + content=Template("solr.xml.j2") + ) + else: + Directory(format("{lib_dir}/data"), + owner=params.solr_user, + create_parents=True, + group=params.user_group + ) + + 
File(format("{lib_dir}/data/solr.xml"), + mode=0644, + group=params.user_group, + owner=params.solr_user, + content=Template("solr.xml.j2") + ) + + #solr-webapp is temp dir, need to own by solr in order for it to wirte temp files into. + Directory(format("{solr_home}/server/solr-webapp"), + owner=params.solr_user, + create_parents=True, + ) + + if params.security_enabled: + File(format("{solr_jaas_file}"), + content=Template("solr_jaas.conf.j2"), + owner=params.solr_user) + + elif type == '4103': + solr41_conf_dir = "/usr/iop/4.1.0.0/solr/conf" + solr41_etc_dir="/etc/solr/4.1.0.0/0" + if not os.path.exists(solr41_etc_dir): + Execute("mkdir -p /etc/solr/4.1.0.0/0") + + content_path=solr41_conf_dir + if not os.path.isfile("/usr/iop/4.1.0.0/solr/conf/solr.in.sh"): + content_path = "/etc/solr/conf.backup" + + for each in os.listdir(content_path): + File(os.path.join(solr41_etc_dir, each), + owner=params.solr_user, + content = StaticFile(os.path.join(content_path,each))) + + if not os.path.islink(solr41_conf_dir): + Directory(solr41_conf_dir, + action="delete", + create_parents=True) + + if os.path.islink(solr41_conf_dir): + os.unlink(solr41_conf_dir) + + if not os.path.islink(solr41_conf_dir): + Link(solr41_conf_dir, + to=solr41_etc_dir + ) http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_client.py new file mode 100755 index 0000000..1969c70 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_client.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor 
class SolrClient(Script):
  """Solr client component: install-only, with no running daemon."""

  def install(self, env):
    """Install the Solr client packages."""
    self.install_packages(env)

  def configure(self, env):
    """No client-side configuration is needed; emit a marker line only."""
    # Use the function form of print (works on Python 2 and 3); the
    # original used a Python-2-only print statement with a stray semicolon.
    print('Configure the solr client')

  def status(self, env):
    # Client components have no daemon whose status could be reported.
    raise ClientComponentHasNoStatus()


if __name__ == "__main__":
  SolrClient().execute()