Return-Path: 
X-Original-To: apmail-ambari-commits-archive@www.apache.org
Delivered-To: apmail-ambari-commits-archive@www.apache.org
Received: from mail.apache.org (hermes.apache.org [140.211.11.3])
    by minotaur.apache.org (Postfix) with SMTP id 6C8F1116C6
    for ; Wed, 16 Apr 2014 15:21:46 +0000 (UTC)
Received: (qmail 88735 invoked by uid 500); 16 Apr 2014 15:21:45 -0000
Delivered-To: apmail-ambari-commits-archive@ambari.apache.org
Received: (qmail 88624 invoked by uid 500); 16 Apr 2014 15:21:43 -0000
Mailing-List: contact commits-help@ambari.apache.org; run by ezmlm
Precedence: bulk
List-Help: 
List-Unsubscribe: 
List-Post: 
List-Id: 
Reply-To: ambari-dev@ambari.apache.org
Delivered-To: mailing list commits@ambari.apache.org
Received: (qmail 88596 invoked by uid 99); 16 Apr 2014 15:21:43 -0000
Received: from tyr.zones.apache.org (HELO tyr.zones.apache.org) (140.211.11.114)
    by apache.org (qpsmtpd/0.29) with ESMTP; Wed, 16 Apr 2014 15:21:43 +0000
Received: by tyr.zones.apache.org (Postfix, from userid 65534)
    id DA30C931C79; Wed, 16 Apr 2014 15:21:42 +0000 (UTC)
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
From: aonishuk@apache.org
To: commits@ambari.apache.org
Date: Wed, 16 Apr 2014 15:21:44 -0000
Message-Id: 
In-Reply-To: 
References: 
X-Mailer: ASF-Git Admin Mailer
Subject: [3/3] git commit: AMBARI-4885. Refactor temporary shared resource
    for HDFS and MR initialization (Ivan Kozlov via aonishuk)

AMBARI-4885. Refactor temporary shared resource for HDFS and MR
initialization (Ivan Kozlov via aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b00d45e5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b00d45e5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b00d45e5

Branch: refs/heads/trunk
Commit: b00d45e544ac96727da0bc57209826dadd5d581c
Parents: 6fa376a
Author: Andrew Onishuk 
Authored: Wed Apr 16 18:20:23 2014 +0300
Committer: Andrew Onishuk 
Committed: Wed Apr 16 18:21:18 2014 +0300

----------------------------------------------------------------------
 .../libraries/script/hook.py                    |   6 +-
 .../1.3.2/hooks/after-INSTALL/scripts/hook.py   |  36 +++
 .../1.3.2/hooks/after-INSTALL/scripts/params.py |  61 +++++
 .../scripts/shared_initialization.py            |  45 ++++
 .../after-INSTALL/templates/hadoop-env.sh.j2    | 139 ++++++++++++
 .../1.3.2/hooks/before-INSTALL/scripts/hook.py  |   1 +
 .../hooks/before-INSTALL/scripts/params.py      |  43 ++++
 .../scripts/shared_initialization.py            |  43 ++++
 .../1.3.2/hooks/before-RESTART/scripts/hook.py  |   2 +-
 .../1.3.2/hooks/before-START/scripts/hook.py    |   3 +-
 .../1.3.2/hooks/before-START/scripts/params.py  |   9 -
 .../scripts/shared_initialization.py            | 154 +-------------
 .../before-START/templates/hadoop-env.sh.j2     | 139 ------------
 .../hooks/before-START/templates/hdfs.conf.j2   |  35 ---
 .../hooks/before-START/templates/slaves.j2      |  21 --
 .../templates/taskcontroller.cfg.j2             |  38 ----
 .../services/HDFS/package/scripts/datanode.py   |   3 +
 .../1.3.2/services/HDFS/package/scripts/hdfs.py |  63 ++++++
 .../HDFS/package/scripts/hdfs_client.py         |  16 +-
 .../services/HDFS/package/scripts/namenode.py   |   2 +
 .../services/HDFS/package/scripts/params.py     |   4 +-
 .../services/HDFS/package/scripts/snamenode.py  |   3 +-
 .../HDFS/package/templates/hdfs.conf.j2         |  35 +++
 .../services/HDFS/package/templates/slaves.j2   |  21 ++
 .../MAPREDUCE/package/scripts/mapreduce.py      |  77 +++++++
 .../MAPREDUCE/package/scripts/params.py         |   8 +-
 .../package/templates/taskcontroller.cfg.j2     |  38 ++++
 .../2.0.6/hooks/before-INSTALL/scripts/hook.py  |   1 +
 .../hooks/before-INSTALL/scripts/params.py      |  12 +
 .../scripts/shared_initialization.py            |  43 ++++
 .../2.0.6/hooks/before-RESTART/scripts/hook.py  |   2 +-
 .../2.0.6/hooks/before-START/scripts/hook.py    |   1 -
 .../2.0.6/hooks/before-START/scripts/params.py  |  12 -
 .../scripts/shared_initialization.py            | 145 +-----------
 .../hooks/before-START/templates/hdfs.conf.j2   |  35 ---
 .../hooks/before-START/templates/slaves.j2      |  21 --
 .../templates/taskcontroller.cfg.j2             |  38 ----
 .../services/HDFS/package/scripts/datanode.py   |   4 +-
 .../2.0.6/services/HDFS/package/scripts/hdfs.py |  63 ++++++
 .../HDFS/package/scripts/hdfs_client.py         |  17 +-
 .../HDFS/package/scripts/journalnode.py         |   3 +
 .../services/HDFS/package/scripts/namenode.py   |   2 +
 .../services/HDFS/package/scripts/params.py     |   2 +
 .../services/HDFS/package/scripts/snamenode.py  |   3 +-
 .../services/HDFS/package/scripts/zkfc_slave.py |   2 +
 .../HDFS/package/templates/hdfs.conf.j2         |  35 +++
 .../services/HDFS/package/templates/slaves.j2   |  21 ++
 .../services/YARN/package/scripts/params.py     |   9 +
 .../2.0.6/services/YARN/package/scripts/yarn.py |  77 +++++++
 .../package/templates/taskcontroller.cfg.j2     |  38 ++++
 .../python/stacks/1.3.2/HDFS/test_datanode.py   |  38 +++-
 .../python/stacks/1.3.2/HDFS/test_namenode.py   |  38 ++++
 .../python/stacks/1.3.2/HDFS/test_snamenode.py  |  35 +++
 .../1.3.2/MAPREDUCE/test_mapreduce_client.py    |  28 ++-
 .../MAPREDUCE/test_mapreduce_historyserver.py   |  27 +++
 .../MAPREDUCE/test_mapreduce_jobtracker.py      |  65 +++++-
 .../MAPREDUCE/test_mapreduce_tasktracker.py     |  31 ++-
 .../hooks/after-INSTALL/test_after_install.py   |  48 ++++
 .../hooks/before-INSTALL/test_before_install.py |  89 +++++---
 .../hooks/before-START/test_before_start.py     | 223 +++----------------
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  36 ++-
 .../stacks/2.0.6/HDFS/test_journalnode.py       |  33 ++-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  36 ++-
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |  34 ++-
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  36 ++-
 .../2.0.6/HIVE/_test_hive_service_check.py      |  98 ++++++++
 .../2.0.6/HIVE/test_hive_service_check.py       |  98 --------
 .../stacks/2.0.6/YARN/test_historyserver.py     |  39 ++++
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |  39 ++++
 .../stacks/2.0.6/YARN/test_nodemanager.py       |  40 +++-
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |  41 +++-
 .../stacks/2.0.6/YARN/test_yarn_client.py       |  55 +++++
 .../hooks/before-INSTALL/test_before_install.py |  13 ++
 .../hooks/before-START/test_before_start.py     | 138 +-----------
 .../stacks/2.1/YARN/test_apptimelineserver.py   |  16 ++
 75 files changed, 1860 insertions(+), 1145 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/script/hook.py b/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
index 19b5204..3133bab 100644
--- a/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
+++ b/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
@@ -46,9 +46,11 @@ class Hook(Script):
     """
     args = sys.argv
     #Hook script to run
-    args[0] = args[0].replace(args[1], command)
+    args[0] = args[0].replace('before-'+args[1], command)
+    args[0] = args[0].replace('after-'+args[1], command)
     #Hook script base directory
-    args[3] = args[3].replace(args[1], command)
+    args[3] = args[3].replace('before-'+args[1], command)
+    args[3] = args[3].replace('after-'+args[1], command)
     cmd = [sys.executable]


http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
new file mode 100644
index 0000000..25a56ad
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from shared_initialization import *
+
+#Hook for hosts with only client without other components
+class AfterInstallHook(Hook):
+
+  def hook(self, env):
+    import params
+
+    env.set_params(params)
+    setup_hadoop_env()
+    setup_config()
+
+if __name__ == "__main__":
+  AfterInstallHook().execute()
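A note on the hook.py change above: run_custom_hook previously substituted the bare phase name (args[1], e.g. "START") inside the hook script paths, which could not distinguish before-START from after-START directories. The patched version substitutes the fully prefixed directory name. A minimal sketch of the rewrite, with hypothetical path values:

    # Hypothetical values illustrating the fixed path rewrite in Hook.run_custom_hook.
    args0 = "/var/lib/ambari-agent/cache/hooks/before-START/scripts/hook.py"  # assumed layout
    phase = "START"            # args[1], the phase of the current command
    command = "after-INSTALL"  # the hook being delegated to

    args0 = args0.replace('before-' + phase, command)
    args0 = args0.replace('after-' + phase, command)
    print(args0)  # -> /var/lib/ambari-agent/cache/hooks/after-INSTALL/scripts/hook.py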
+ +""" + +from resource_management import * +from resource_management.core.system import System +import os + +config = Script.get_config() + +#security params +_authentication = config['configurations']['core-site']['hadoop.security.authentication'] +security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos') +#java params +java_home = config['hostLevelParams']['java_home'] +#hadoop params +hadoop_conf_dir = "/etc/hadoop/conf" +hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix'] +hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix'] + +#hadoop-env.sh +if System.get_instance().os_family == "suse": + jsvc_path = "/usr/lib/bigtop-utils" +else: + jsvc_path = "/usr/libexec/bigtop-utils" +hadoop_heapsize = config['configurations']['global']['hadoop_heapsize'] +namenode_heapsize = config['configurations']['global']['namenode_heapsize'] +namenode_opt_newsize = config['configurations']['global']['namenode_opt_newsize'] +namenode_opt_maxnewsize = config['configurations']['global']['namenode_opt_maxnewsize'] + +jtnode_opt_newsize = default("jtnode_opt_newsize","200m") +jtnode_opt_maxnewsize = default("jtnode_opt_maxnewsize","200m") +jtnode_heapsize = default("jtnode_heapsize","1024m") +ttnode_heapsize = default("ttnode_heapsize","1024m") + +dtnode_heapsize = config['configurations']['global']['dtnode_heapsize'] + +mapred_pid_dir_prefix = default("mapred_pid_dir_prefix","/var/run/hadoop-mapreduce") +mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*" +hadoop_libexec_dir = "/usr/lib/hadoop/libexec" +mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce") + + +#users and groups +hdfs_user = config['configurations']['global']['hdfs_user'] +user_group = config['configurations']['global']['user_group'] \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py new file mode 100644 index 0000000..2a26e2e --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py @@ -0,0 +1,45 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" +import os +from resource_management import * + +def setup_hadoop_env(): + import params + if params.security_enabled: + tc_owner = "root" + else: + tc_owner = params.hdfs_user + Directory(params.hadoop_conf_dir, + recursive=True, + owner='root', + group='root' + ) + File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'), + owner=tc_owner, + content=Template('hadoop-env.sh.j2') + ) + +def setup_config(): + import params + XmlConfig("core-site.xml", + conf_dir=params.hadoop_conf_dir, + configurations=params.config['configurations']['core-site'], + owner=params.hdfs_user, + group=params.user_group + ) http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2 new file mode 100644 index 0000000..76ac3f3 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2 @@ -0,0 +1,139 @@ +{# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#} + +#/* +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ + +# Set Hadoop-specific environment variables here. + +# The only required environment variable is JAVA_HOME. All others are +# optional. When running a distributed configuration it is best to +# set JAVA_HOME in this file, so that it is correctly defined on +# remote nodes. + +# The java implementation to use. Required. 
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2
new file mode 100644
index 0000000..76ac3f3
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2
@@ -0,0 +1,139 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}

+#/*
+# * Licensed to the Apache Software Foundation (ASF) under one
+# * or more contributor license agreements.  See the NOTICE file
+# * distributed with this work for additional information
+# * regarding copyright ownership.  The ASF licenses this file
+# * to you under the Apache License, Version 2.0 (the
+# * "License"); you may not use this file except in compliance
+# * with the License.  You may obtain a copy of the License at
+# *
+# *     http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */

+# Set Hadoop-specific environment variables here.

+# The only required environment variable is JAVA_HOME.  All others are
+# optional.  When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.

+# The java implementation to use.  Required.
+export JAVA_HOME={{java_home}}
+export HADOOP_HOME_WARN_SUPPRESS=1

+# Hadoop Configuration Directory
+#TODO: if env var set that can cause problems
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}

+# this is different for HDP1 #
+# Path to jsvc required by secure HDP 2.0 datanode
+# export JSVC_HOME={{jsvc_path}}


+# The maximum amount of heap to use, in MB. Default is 1000.
+export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"

+export HADOOP_NAMENODE_INIT_HEAPSIZE="-Xms{{namenode_heapsize}}"

+# Extra Java runtime options.  Empty by default.
+export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}"

+# History server logs
+export HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER

+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
+export HADOOP_JOBTRACKER_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA -Dmapred.log.dir=$HADOOP_MAPRED_LOG_DIR ${HADOOP_JOBTRACKER_OPTS}"

+HADOOP_TASKTRACKER_OPTS="-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
+HADOOP_DATANODE_OPTS="-Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}"
+HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"

+export HADOOP_SECONDARYNAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}"

+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
+# On secure datanodes, user to run the datanode as after dropping privileges
+export HADOOP_SECURE_DN_USER={{hdfs_user}}

+# Extra ssh options.  Empty by default.
+export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"

+# Where log files are stored.  $HADOOP_HOME/logs by default.
+export HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER

+# Where log files are stored in the secure data environment.
+export HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER

+# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves

+# host:path where hadoop code should be rsync'd from.  Unset by default.
+# export HADOOP_MASTER=master:/home/$USER/src/hadoop

+# Seconds to sleep between slave commands.  Unset by default.  This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HADOOP_SLAVE_SLEEP=0.1

+# The directory where pid files are stored. /tmp by default.
+export HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER
+export HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER

+# History server pid
+export HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER

+YARN_RESOURCEMANAGER_OPTS="-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY"

+# A string representing this instance of hadoop. $USER by default.
+export HADOOP_IDENT_STRING=$USER

+# The scheduling priority for daemon processes.  See 'man nice'.

+# export HADOOP_NICENESS=10

+# Use libraries from standard classpath
+JAVA_JDBC_LIBS=""
+#Add libraries required by mysql connector
+for jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`
+do
+  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+done
+#Add libraries required by oracle connector
+for jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`
+do
+  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+done
+#Add libraries required by nodemanager
+MAPREDUCE_LIBS={{mapreduce_libs_path}}
+export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}

+# Setting path to hdfs command line
+export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}

+#Mostly required for hadoop 2.0
+export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
index 51e5cd2..03859e4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
@@ -29,6 +29,7 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
+    setup_java()
     setup_users()
     install_packages()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
index fa19ca3..f2a4199 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
@@ -23,6 +23,49 @@ import os
 
 config = Script.get_config()
 
+#java params
+artifact_dir = "/tmp/HDP-artifacts/"
+jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_location = config['hostLevelParams']['jdk_location']
+java_home = config['hostLevelParams']['java_home']
+if System.get_instance().os_family == "suse":
+  jsvc_path = "/usr/lib/bigtop-utils"
+else:
+  jsvc_path = "/usr/libexec/bigtop-utils"
+#security params
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
+#hadoop params
+hadoop_conf_dir = "/etc/hadoop/conf"
+
+#hadoop-env.sh
+
+java_home = config['hostLevelParams']['java_home']
+if System.get_instance().os_family == "suse":
+  jsvc_path = "/usr/lib/bigtop-utils"
+else:
+  jsvc_path = "/usr/libexec/bigtop-utils"
+hadoop_heapsize = config['configurations']['global']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['global']['namenode_heapsize']
+namenode_opt_newsize = config['configurations']['global']['namenode_opt_newsize']
+namenode_opt_maxnewsize = config['configurations']['global']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = default("jtnode_opt_newsize","200m")
+jtnode_opt_maxnewsize = default("jtnode_opt_maxnewsize","200m")
+jtnode_heapsize = default("jtnode_heapsize","1024m")
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['global']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix']
+
 #users and groups
 yarn_user = config['configurations']['global']['yarn_user']
 hbase_user = config['configurations']['global']['hbase_user']
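The setup_java() function, moved into the before-INSTALL hook below, leans on Execute's not_if guard for idempotency: the shell command runs only when the guard command exits non-zero. A sketch of that idiom with hypothetical paths (the real code builds its URLs from jdk_location and jdk_name in params):

    # Shape of the idempotency idiom used by setup_java(); values are assumed.
    from resource_management import Execute, format

    java_exec = "/usr/jdk64/jdk1.6.0_31/bin/java"   # hypothetical java_home layout
    Execute("curl -kf --retry 10 http://ambari.example.com/resources/jdk.bin -o /tmp/jdk.bin",
            path = ["/bin", "/usr/bin/"],
            not_if = format("test -e {java_exec}"))  # skipped once java is already installed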
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
index 6a37d02..98b5dea 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -101,6 +101,49 @@ def set_uid(user, user_dirs):
   Execute(format("/tmp/changeUid.sh {user} {user_dirs} 2>/dev/null"),
           not_if = format("test $(id -u {user}) -gt 1000"))
 
+def setup_java():
+  """
+  Installs jdk using specific params, that comes from ambari-server
+  """
+  import params
+
+  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
+  java_dir = os.path.dirname(params.java_home)
+  java_exec = format("{java_home}/bin/java")
+
+  if not params.jdk_name:
+    return
+
+  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}"))
+
+  if params.jdk_name.endswith(".bin"):
+    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
+  elif params.jdk_name.endswith(".gz"):
+    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
+
+  Execute(install_cmd,
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}")
+  )
+  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
+  Execute( download_jce,
+           path = ["/bin","/usr/bin/"],
+           not_if =format("test -e {jce_curl_target}"),
+           ignore_failures = True
+  )
+
+  if params.security_enabled:
+    security_dir = format("{java_home}/jre/lib/security")
+    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
+    Execute(extract_cmd,
+            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
+            cwd = security_dir,
+            path = ['/bin/','/usr/bin']
+    )
+
 def install_packages():
   packages = {"redhat": ["net-snmp-utils", "net-snmp"],
               "suse": ["net-snmp"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
index 7042602..0596106 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
@@ -24,7 +24,7 @@ from resource_management import *
 class BeforeConfigureHook(Hook):
 
   def hook(self, env):
-    self.run_custom_hook('START')
+    self.run_custom_hook('before-START')
 
 if __name__ == "__main__":
   BeforeConfigureHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
index 075a6b6..31f70bb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
@@ -29,8 +29,9 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
-    setup_java()
+    self.run_custom_hook('after-INSTALL')
     setup_hadoop()
+    setup_database()
     setup_configs()
     create_javahome_symlink()
     init_services()
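The hunk above is the core of the refactor: before-START no longer installs Java or writes the shared Hadoop environment itself; it replays the after-INSTALL hook, so hosts carrying only clients get the same shared setup. A toy model of the control flow (not the real Script/Hook classes, which spawn hooks/<command>/scripts/hook.py in a subprocess):

    # Toy model of the hook delegation added in this commit.
    class Hook(object):
        def run_custom_hook(self, command):
            print("running hooks/%s/scripts/hook.py" % command)

    class BeforeStartHook(Hook):
        def hook(self):
            self.run_custom_hook('after-INSTALL')  # shared env + core-site setup
            print("before-START specific work: snmp, metrics, topology")

    BeforeStartHook().hook()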
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
index 2db9b4b..61e04f8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
@@ -23,23 +23,14 @@ import os
 
 config = Script.get_config()
 
-#java params
-artifact_dir = "/tmp/HDP-artifacts/"
-jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
-jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
-jce_location = config['hostLevelParams']['jdk_location']
-jdk_location = config['hostLevelParams']['jdk_location']
 #security params
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 #users and groups
-mapred_user = config['configurations']['global']['mapred_user']
 hdfs_user = config['configurations']['global']['hdfs_user']
-yarn_user = config['configurations']['global']['yarn_user']
 user_group = config['configurations']['global']['user_group']
 
-mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 #snmp
 snmp_conf_dir = "/etc/snmp/"

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
index c6cc432..35f22ba 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
@@ -21,49 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_java():
-  """
-  Installs jdk using specific params, that comes from ambari-server
-  """
-  import params
-
-  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
-  java_dir = os.path.dirname(params.java_home)
-  java_exec = format("{java_home}/bin/java")
-
-  if not params.jdk_name:
-    return
-
-  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}"))
-
-  if params.jdk_name.endswith(".bin"):
-    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
-  elif params.jdk_name.endswith(".gz"):
-    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
-
-  Execute(install_cmd,
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}")
-  )
-  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
-  Execute( download_jce,
-           path = ["/bin","/usr/bin/"],
-           not_if =format("test -e {jce_curl_target}"),
-           ignore_failures = True
-  )
-
-  if params.security_enabled:
-    security_dir = format("{java_home}/jre/lib/security")
-    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
-    Execute(extract_cmd,
-            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-            cwd = security_dir,
-            path = ['/bin/','/usr/bin']
-    )
-
 def setup_hadoop():
   """
   Setup hadoop files and directories
@@ -77,11 +34,6 @@ def setup_hadoop():
     install_snappy()
 
   #directories
-  Directory(params.hadoop_conf_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
   Directory(params.hdfs_log_dir_prefix,
             recursive=True,
             owner='root',
@@ -94,40 +46,14 @@ def setup_hadoop():
   )
 
   #files
-  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
-       owner='root',
-       group='root',
-       mode=0644,
-       content=Template("hdfs.conf.j2")
-  )
   if params.security_enabled:
-    File(os.path.join(params.hadoop_bin, "task-controller"),
-         owner="root",
-         group=params.mapred_tt_group,
-         mode=06050
-    )
-    tc_mode = 0644
     tc_owner = "root"
   else:
-    tc_mode = None
     tc_owner = params.hdfs_user
 
-  if tc_mode:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
-         owner = tc_owner,
-         mode = tc_mode,
-         group = params.mapred_tt_group,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  else:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+  File(os.path.join(params.hadoop_conf_dir, 'commons-logging.properties'),
        owner=tc_owner,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  for file in ['hadoop-env.sh', 'commons-logging.properties', 'slaves']:
-    File(os.path.join(params.hadoop_conf_dir, file),
-         owner=tc_owner,
-         content=Template(file + ".j2")
+       content=Template("commons-logging.properties.j2")
   )
 
   health_check_template = "health_check" #for stack 1 use 'health_check'
@@ -156,6 +82,11 @@ def setup_hadoop():
     content=Template("hadoop-metrics2.properties.j2")
   )
 
+def setup_database():
+  """
+  Load DB
+  """
+  import params
   db_driver_dload_cmd = ""
   if params.server_db_name == 'oracle' and params.oracle_driver_url != "":
     db_driver_dload_cmd = format(
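setup_database(), carved out above, builds its download command with the format() helper, which (unlike str.format) resolves {names} from the calling scope, so command strings read like shell templates. A sketch of that behavior with made-up values:

    # Illustration of resource_management's format(); the URL is hypothetical.
    from resource_management import format

    oracle_driver_url = "http://repo.example.com/ojdbc6.jar"
    print(format("curl -kf --retry 5 {oracle_driver_url} -o /usr/lib/hadoop/lib/ojdbc6.jar"))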
@@ -176,66 +107,17 @@ def setup_configs():
   """
   import params
 
-  if "mapred-queue-acls" in params.config['configurations']:
-    XmlConfig("mapred-queue-acls.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'mapred-queue-acls'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-  elif os.path.exists(
-      os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
-    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-
-  if "hadoop-policy" in params.config['configurations']:
-    XmlConfig("hadoop-policy.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['hadoop-policy'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
   XmlConfig("core-site.xml",
             conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['core-site'],
             owner=params.hdfs_user,
             group=params.user_group
   )
-
-  if "mapred-site" in params.config['configurations']:
-    XmlConfig("mapred-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['mapred-site'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-
   File(params.task_log4j_properties_location,
        content=StaticFile("task-log4j.properties"),
        mode=0755
   )
 
-  if "capacity-scheduler" in params.config['configurations']:
-    XmlConfig("capacity-scheduler.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'capacity-scheduler'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
-  XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-  )
-
-  # if params.stack_version[0] == "1":
   Link('/usr/lib/hadoop/lib/hadoop-tools.jar',
        to = '/usr/lib/hadoop/hadoop-tools.jar'
   )
@@ -245,26 +127,10 @@ def setup_configs():
          owner=params.hdfs_user,
          group=params.user_group
     )
-  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
-    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
+
   if os.path.exists(os.path.join(params.hadoop_conf_dir, 'masters')):
     File(os.path.join(params.hadoop_conf_dir, 'masters'),
-         owner=params.hdfs_user,
-         group=params.user_group
-    )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
-         owner=params.mapred_user,
+         owner=params.hdfs_user,
          group=params.user_group
     )
 
@@ -312,4 +178,4 @@ def init_services():
   # enable snmpd
   Execute( "service snmpd start; chkconfig snmpd on",
     path = "/usr/local/bin/:/bin/:/sbin/"
-  )
\ No newline at end of file
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2
deleted file mode 100644
index 76ac3f3..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2
+++ /dev/null
@@ -1,139 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}

-#/*
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */

-# Set Hadoop-specific environment variables here.

-# The only required environment variable is JAVA_HOME.  All others are
-# optional.  When running a distributed configuration it is best to
-# set JAVA_HOME in this file, so that it is correctly defined on
-# remote nodes.

-# The java implementation to use.  Required.
-export JAVA_HOME={{java_home}}
-export HADOOP_HOME_WARN_SUPPRESS=1

-# Hadoop Configuration Directory
-#TODO: if env var set that can cause problems
-export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}

-# this is different for HDP1 #
-# Path to jsvc required by secure HDP 2.0 datanode
-# export JSVC_HOME={{jsvc_path}}


-# The maximum amount of heap to use, in MB. Default is 1000.
-export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"

-export HADOOP_NAMENODE_INIT_HEAPSIZE="-Xms{{namenode_heapsize}}"

-# Extra Java runtime options.  Empty by default.
-export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}"

-# History server logs
-export HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER

-# Command specific options appended to HADOOP_OPTS when specified
-export HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
-export HADOOP_JOBTRACKER_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA -Dmapred.log.dir=$HADOOP_MAPRED_LOG_DIR ${HADOOP_JOBTRACKER_OPTS}"

-HADOOP_TASKTRACKER_OPTS="-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
-HADOOP_DATANODE_OPTS="-Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}"
-HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"

-export HADOOP_SECONDARYNAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}"

-# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
-# On secure datanodes, user to run the datanode as after dropping privileges
-export HADOOP_SECURE_DN_USER={{hdfs_user}}

-# Extra ssh options.  Empty by default.
-export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"

-# Where log files are stored.  $HADOOP_HOME/logs by default.
-export HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER

-# Where log files are stored in the secure data environment.
-export HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER

-# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
-# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves

-# host:path where hadoop code should be rsync'd from.  Unset by default.
-# export HADOOP_MASTER=master:/home/$USER/src/hadoop

-# Seconds to sleep between slave commands.  Unset by default.  This
-# can be useful in large clusters, where, e.g., slave rsyncs can
-# otherwise arrive faster than the master can service them.
-# export HADOOP_SLAVE_SLEEP=0.1

-# The directory where pid files are stored. /tmp by default.
-export HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER
-export HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER

-# History server pid
-export HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER

-YARN_RESOURCEMANAGER_OPTS="-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY"

-# A string representing this instance of hadoop. $USER by default.
-export HADOOP_IDENT_STRING=$USER

-# The scheduling priority for daemon processes.  See 'man nice'.

-# export HADOOP_NICENESS=10

-# Use libraries from standard classpath
-JAVA_JDBC_LIBS=""
-#Add libraries required by mysql connector
-for jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`
-do
-  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
-done
-#Add libraries required by oracle connector
-for jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`
-do
-  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
-done
-#Add libraries required by nodemanager
-MAPREDUCE_LIBS={{mapreduce_libs_path}}
-export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}

-# Setting path to hdfs command line
-export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}

-#Mostly required for hadoop 2.0
-export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2
deleted file mode 100644
index d58a6f5..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2
+++ /dev/null
@@ -1,35 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}

-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.

-{{hdfs_user}}   - nofile 32768
-{{hdfs_user}}   - nproc  65536

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2
deleted file mode 100644
index 4a9e713..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2
+++ /dev/null
@@ -1,21 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}

-{% for host in slave_hosts %}
-{{host}}
-{% endfor %}
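The slaves.j2 template deleted above (and re-added under the HDFS package later in this diff) is a plain Jinja loop; Ambari's Template resource renders it with the params module as context. Rendering it standalone with stock jinja2 would look like this (Ambari bundles its own jinja2 fork, but the template semantics are the same):

    # Standalone rendering sketch of the slaves.j2 loop; hostnames are made up.
    import jinja2

    tmpl = jinja2.Template("{% for host in slave_hosts %}{{host}}\n{% endfor %}")
    print(tmpl.render(slave_hosts=["dn0.example.com", "dn1.example.com"]))
    # dn0.example.com
    # dn1.example.com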
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2
deleted file mode 100644
index 3d5f4f2..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2
+++ /dev/null
@@ -1,38 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}

-#/*
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-mapred.local.dir={{mapred_local_dir}}
-mapreduce.tasktracker.group={{mapred_tt_group}}
-hadoop.log.dir={{hdfs_log_dir_prefix}}/{{mapred_user}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
index 57fdb35..379594d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_datanode import datanode
+from hdfs import hdfs
 
 
 class DataNode(Script):
@@ -44,6 +45,8 @@ class DataNode(Script):
 
   def configure(self, env):
     import params
+    env.set_params(params)
+    hdfs()
     datanode(action="configure")
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py
new file mode 100644
index 0000000..06bf583
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+import sys
+import os
+
+
+def hdfs(name=None):
+  import params
+
+  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
+       owner='root',
+       group='root',
+       mode=0644,
+       content=Template("hdfs.conf.j2")
+  )
+
+  if params.security_enabled:
+    tc_mode = 0644
+    tc_owner = "root"
+  else:
+    tc_mode = None
+    tc_owner = params.hdfs_user
+
+  if "hadoop-policy" in params.config['configurations']:
+    XmlConfig("hadoop-policy.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['hadoop-policy'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  XmlConfig("hdfs-site.xml",
+            conf_dir=params.hadoop_conf_dir,
+            configurations=params.config['configurations']['hdfs-site'],
+            owner=params.hdfs_user,
+            group=params.user_group
+  )
+
+  File(os.path.join(params.hadoop_conf_dir, 'slaves'),
+       owner=tc_owner,
+       content=Template("slaves.j2")
+  )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
index 40bde89..3adc40d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
@@ -18,6 +18,7 @@ limitations under the License.
 """
 
 from resource_management import *
+from hdfs import hdfs
 from utils import service
 
 
@@ -44,20 +45,7 @@ class HdfsClient(Script):
 
   def configure(self, env):
     import params
-
-    XmlConfig("core-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['core-site'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
-    XmlConfig("hdfs-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['hdfs-site'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
+    hdfs()
 
 
 if __name__ == "__main__":
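The new hdfs.py above is the other half of the refactor: configuration every HDFS component needs (limits, hadoop-policy, hdfs-site, slaves) now lives in a single hdfs() function, and each component's configure() calls it before its own setup, as the hdfs_client.py hunk shows. In outline (toy classes, real function names):

    # Outline of the call pattern after the refactor.
    def hdfs():
        print("write hdfs.conf, hadoop-policy.xml, hdfs-site.xml, slaves")

    class DataNode(object):
        def configure(self, env):
            hdfs()                                 # shared HDFS initialization
            print("then datanode-specific setup")

    DataNode().configure(env=None)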
from resource_management import * from hdfs_namenode import namenode +from hdfs import hdfs class NameNode(Script): @@ -45,6 +46,7 @@ class NameNode(Script): import params env.set_params(params) + hdfs() namenode(action="configure") pass http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py index 98d536c..9307910 100644 --- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py @@ -143,6 +143,4 @@ HdfsDirectory = functools.partial( keytab = hdfs_user_keytab, kinit_path_local = kinit_path_local ) - - - +limits_conf_dir = "/etc/security/limits.d" \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py index b2a3bd1..62fe2bc 100644 --- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py @@ -19,6 +19,7 @@ limitations under the License. from resource_management import * from hdfs_snamenode import snamenode +from hdfs import hdfs class SNameNode(Script): @@ -49,7 +50,7 @@ class SNameNode(Script): import params env.set_params(params) - + hdfs() snamenode(action="configure") def status(self, env): http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2 new file mode 100644 index 0000000..d58a6f5 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2 @@ -0,0 +1,35 @@ +{# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#} + +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +{{hdfs_user}} - nofile 32768 +{{hdfs_user}} - nproc 65536 http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2 new file mode 100644 index 0000000..4a9e713 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2 @@ -0,0 +1,21 @@ +{# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#} + +{% for host in slave_hosts %} +{{host}} +{% endfor %} http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py index 4b26814..a957d78 100644 --- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py @@ -97,3 +97,80 @@ def mapreduce(name=None): owner=params.mapred_user, group=params.user_group, ) + + if params.security_enabled: + tc_mode = 0644 + tc_owner = "root" + else: + tc_mode = None + tc_owner = params.hdfs_user + + if params.security_enabled: + File(os.path.join(params.hadoop_bin, "task-controller"), + owner="root", + group=params.mapred_tt_group, + mode=06050 + ) + File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'), + owner = tc_owner, + mode = tc_mode, + group = params.mapred_tt_group, + content=Template("taskcontroller.cfg.j2") + ) + else: + File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'), + owner=tc_owner, + content=Template("taskcontroller.cfg.j2") + ) + + if "capacity-scheduler" in params.config['configurations']: + XmlConfig("capacity-scheduler.xml", + conf_dir=params.hadoop_conf_dir, + configurations=params.config['configurations'][ + 'capacity-scheduler'], + owner=params.hdfs_user, + group=params.user_group + ) + + if "mapred-queue-acls" in params.config['configurations']: + XmlConfig("mapred-queue-acls.xml", + conf_dir=params.hadoop_conf_dir, + configurations=params.config['configurations'][ + 'mapred-queue-acls'], + owner=params.mapred_user, + group=params.user_group + ) + elif os.path.exists( + os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")): + File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"), + owner=params.mapred_user, + group=params.user_group + ) + + if "mapred-site" in params.config['configurations']: + XmlConfig("mapred-site.xml", + conf_dir=params.hadoop_conf_dir, + configurations=params.config['configurations']['mapred-site'], + owner=params.mapred_user, + group=params.user_group + ) + + if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')): + File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'), + owner=params.mapred_user, + group=params.user_group + ) + + if os.path.exists( + os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')): + File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'), + owner=params.mapred_user, + group=params.user_group + ) + + if os.path.exists( + os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')): + File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'), + owner=params.mapred_user, + group=params.user_group + ) http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py index 1a77f95..f6f4367 100644 --- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py @@ -61,9 +61,9 @@ mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapred. #for create_hdfs_directory hostname = config["hostname"] hadoop_conf_dir = "/etc/hadoop/conf" +hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix'] hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab'] hdfs_user = config['configurations']['global']['hdfs_user'] -kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"]) import functools #create partial functions with common arguments for every HdfsDirectory call #to create hdfs directory we need to call params.HdfsDirectory in code @@ -74,4 +74,8 @@ HdfsDirectory = functools.partial( security_enabled = security_enabled, keytab = hdfs_user_keytab, kinit_path_local = kinit_path_local -) \ No newline at end of file +) + +mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group) + +slave_hosts = default("/clusterHostInfo/slave_hosts", []) http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2 new file mode 100644 index 0000000..3d5f4f2 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2 @@ -0,0 +1,38 @@ +{# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#} + +#/* +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ +mapred.local.dir={{mapred_local_dir}} +mapreduce.tasktracker.group={{mapred_tt_group}} +hadoop.log.dir={{hdfs_log_dir_prefix}}/{{mapred_user}} http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py index 51e5cd2..03859e4 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py @@ -29,6 +29,7 @@ class BeforeConfigureHook(Hook): import params env.set_params(params) + setup_java() setup_users() install_packages() http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py index 341d86f..1f8bfa8 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py @@ -87,3 +87,15 @@ if has_ganglia_server: ganglia_server_host = ganglia_server_hosts[0] hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir'] + +#security params +_authentication = config['configurations']['core-site']['hadoop.security.authentication'] +security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos') + +#java params +java_home = config['hostLevelParams']['java_home'] +artifact_dir = "/tmp/HDP-artifacts/" +jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user +jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user +jce_location = config['hostLevelParams']['jdk_location'] +jdk_location = config['hostLevelParams']['jdk_location'] http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py index c7c0c70..0ae8b19 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py @@ -119,6 +119,49 @@ def set_uid(user, user_dirs): Execute(format("/tmp/changeUid.sh {user} {user_dirs} 2>/dev/null"), not_if = format("test $(id -u {user}) -gt 1000")) +def setup_java(): + """ + Installs jdk using specific params, that comes from ambari-server + """ + import params + + jdk_curl_target = format("{artifact_dir}/{jdk_name}") + java_dir = os.path.dirname(params.java_home) + java_exec = format("{java_home}/bin/java") + + if not params.jdk_name: + return + + Execute(format("mkdir -p {artifact_dir} ; 
curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"), + path = ["/bin","/usr/bin/"], + not_if = format("test -e {java_exec}")) + + if params.jdk_name.endswith(".bin"): + install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1") + elif params.jdk_name.endswith(".gz"): + install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1") + + Execute(install_cmd, + path = ["/bin","/usr/bin/"], + not_if = format("test -e {java_exec}") + ) + jce_curl_target = format("{artifact_dir}/{jce_policy_zip}") + download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}") + Execute( download_jce, + path = ["/bin","/usr/bin/"], + not_if =format("test -e {jce_curl_target}"), + ignore_failures = True + ) + + if params.security_enabled: + security_dir = format("{java_home}/jre/lib/security") + extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}") + Execute(extract_cmd, + only_if = format("test -e {security_dir} && test -f {jce_curl_target}"), + cwd = security_dir, + path = ['/bin/','/usr/bin'] + ) + def install_packages(): packages = {"redhat": ["net-snmp-utils", "net-snmp"], "suse": ["net-snmp"], http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py index 7042602..0596106 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py @@ -24,7 +24,7 @@ from resource_management import * class BeforeConfigureHook(Hook): def hook(self, env): - self.run_custom_hook('START') + self.run_custom_hook('before-START') if __name__ == "__main__": BeforeConfigureHook().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py index 075a6b6..979c628 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py @@ -29,7 +29,6 @@ class BeforeConfigureHook(Hook): import params env.set_params(params) - setup_java() setup_hadoop() setup_configs() create_javahome_symlink() http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py index f1ef84a..8b342f7 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py +++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py @@ -23,12 +23,6 @@ import os config = Script.get_config() -#java params -artifact_dir = "/tmp/HDP-artifacts/" -jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user -jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user -jce_location = config['hostLevelParams']['jdk_location'] -jdk_location = config['hostLevelParams']['jdk_location'] #security params _authentication = config['configurations']['core-site']['hadoop.security.authentication'] security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos') @@ -39,7 +33,6 @@ hdfs_user = config['configurations']['global']['hdfs_user'] yarn_user = config['configurations']['global']['yarn_user'] user_group = config['configurations']['global']['user_group'] -mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group) #snmp snmp_conf_dir = "/etc/snmp/" @@ -88,7 +81,6 @@ hadoop_home = "/usr" hadoop_bin = "/usr/lib/hadoop/sbin" task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties") -limits_conf_dir = "/etc/security/limits.d" hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix'] hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir'] @@ -138,10 +130,6 @@ mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*" hadoop_libexec_dir = "/usr/lib/hadoop/libexec" mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce") -#taskcontroller.cfg - -mapred_local_dir = "/tmp/hadoop-mapred/mapred/local" - #log4j.properties yarn_log_dir_prefix = default("yarn_log_dir_prefix","/var/log/hadoop-yarn") http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py index f132c2e..6e34a95 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py @@ -21,49 +21,6 @@ import os from resource_management import * -def setup_java(): - """ - Installs jdk using specific params, that comes from ambari-server - """ - import params - - jdk_curl_target = format("{artifact_dir}/{jdk_name}") - java_dir = os.path.dirname(params.java_home) - java_exec = format("{java_home}/bin/java") - - if not params.jdk_name: - return - - Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"), - path = ["/bin","/usr/bin/"], - not_if = format("test -e {java_exec}")) - - if params.jdk_name.endswith(".bin"): - install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1") - elif params.jdk_name.endswith(".gz"): - install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1") - - Execute(install_cmd, - path = ["/bin","/usr/bin/"], - not_if = format("test -e {java_exec}") - ) - jce_curl_target = format("{artifact_dir}/{jce_policy_zip}") - download_jce = 
format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}") - Execute( download_jce, - path = ["/bin","/usr/bin/"], - not_if =format("test -e {jce_curl_target}"), - ignore_failures = True - ) - - if params.security_enabled: - security_dir = format("{java_home}/jre/lib/security") - extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}") - Execute(extract_cmd, - only_if = format("test -e {security_dir} && test -f {jce_curl_target}"), - cwd = security_dir, - path = ['/bin/','/usr/bin'] - ) - def setup_hadoop(): """ Setup hadoop files and directories @@ -98,37 +55,12 @@ def setup_hadoop(): owner=params.hdfs_user, ) #files - File(os.path.join(params.limits_conf_dir, 'hdfs.conf'), - owner='root', - group='root', - mode=0644, - content=Template("hdfs.conf.j2") - ) if params.security_enabled: - File(os.path.join(params.hadoop_bin, "task-controller"), - owner="root", - group=params.mapred_tt_group, - mode=06050 - ) - tc_mode = 0644 tc_owner = "root" else: - tc_mode = None tc_owner = params.hdfs_user - if tc_mode: - File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'), - owner = tc_owner, - mode = tc_mode, - group = params.mapred_tt_group, - content=Template("taskcontroller.cfg.j2") - ) - else: - File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'), - owner=tc_owner, - content=Template("taskcontroller.cfg.j2") - ) - for file in ['hadoop-env.sh', 'commons-logging.properties', 'slaves']: + for file in ['hadoop-env.sh', 'commons-logging.properties']: File(os.path.join(params.hadoop_conf_dir, file), owner=tc_owner, content=Template(file + ".j2") @@ -160,6 +92,11 @@ def setup_hadoop(): content=Template("hadoop-metrics2.properties.j2") ) +def setup_database(): + """ + Load DB + """ + import params db_driver_dload_cmd = "" if params.server_db_name == 'oracle' and params.oracle_driver_url != "": db_driver_dload_cmd = format( @@ -180,29 +117,6 @@ def setup_configs(): """ import params - if "mapred-queue-acls" in params.config['configurations']: - XmlConfig("mapred-queue-acls.xml", - conf_dir=params.hadoop_conf_dir, - configurations=params.config['configurations'][ - 'mapred-queue-acls'], - owner=params.mapred_user, - group=params.user_group - ) - elif os.path.exists( - os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")): - File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"), - owner=params.mapred_user, - group=params.user_group - ) - - if "hadoop-policy" in params.config['configurations']: - XmlConfig("hadoop-policy.xml", - conf_dir=params.hadoop_conf_dir, - configurations=params.config['configurations']['hadoop-policy'], - owner=params.hdfs_user, - group=params.user_group - ) - XmlConfig("core-site.xml", conf_dir=params.hadoop_conf_dir, configurations=params.config['configurations']['core-site'], @@ -210,68 +124,21 @@ def setup_configs(): group=params.user_group ) - if "mapred-site" in params.config['configurations']: - XmlConfig("mapred-site.xml", - conf_dir=params.hadoop_conf_dir, - configurations=params.config['configurations']['mapred-site'], - owner=params.mapred_user, - group=params.user_group - ) - File(params.task_log4j_properties_location, content=StaticFile("task-log4j.properties"), mode=0755 ) - if "capacity-scheduler" in params.config['configurations']: - XmlConfig("capacity-scheduler.xml", - conf_dir=params.hadoop_conf_dir, - configurations=params.config['configurations'][ - 'capacity-scheduler'], - owner=params.hdfs_user, - group=params.user_group 
- ) - - XmlConfig("hdfs-site.xml", - conf_dir=params.hadoop_conf_dir, - configurations=params.config['configurations']['hdfs-site'], - owner=params.hdfs_user, - group=params.user_group - ) - - # if params.stack_version[0] == "1": - # Link('/usr/lib/hadoop/hadoop-tools.jar', - # to = '/usr/lib/hadoop/lib/hadoop-tools.jar', - # mode = 0755 - # ) - if os.path.exists(os.path.join(params.hadoop_conf_dir, 'configuration.xsl')): File(os.path.join(params.hadoop_conf_dir, 'configuration.xsl'), owner=params.hdfs_user, group=params.user_group ) - if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')): - File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'), - owner=params.mapred_user, - group=params.user_group - ) if os.path.exists(os.path.join(params.hadoop_conf_dir, 'masters')): File(os.path.join(params.hadoop_conf_dir, 'masters'), owner=params.hdfs_user, group=params.user_group ) - if os.path.exists( - os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')): - File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'), - owner=params.mapred_user, - group=params.user_group - ) - if os.path.exists( - os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')): - File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'), - owner=params.mapred_user, - group=params.user_group - ) generate_include_file()
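
----------------------------------------------------------------------
A note on the central pattern above: the new per-service hdfs() function
owns every resource the HDFS components share (hdfs-site.xml, the slaves
file, the ulimit settings), and each component script just publishes its
params and delegates. A minimal sketch of that shape, assuming the
resource_management library and a params module exactly as in this patch;
the class name here is illustrative, not a class the patch adds:

  from resource_management import *
  from hdfs import hdfs

  class ExampleHdfsComponent(Script):
    def configure(self, env):
      import params
      env.set_params(params)  # make params visible to format() and templates
      hdfs()                  # shared HDFS resources live in one place

  if __name__ == "__main__":
    ExampleHdfsComponent().execute()

This is why DataNode, NameNode, SNameNode and HdfsClient above all end up
with the same two-line configure() body instead of repeating XmlConfig
blocks per component.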
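The XmlConfig calls are consistently wrapped in presence checks, so a
config type the server did not send is never rendered from an empty dict.
A sketch of that guard pulled out into a standalone helper (the helper
name is hypothetical; the patch inlines the pattern in hdfs.py and
mapreduce.py):

  from resource_management import *

  def render_optional_xml(name, key):
    import params
    # Only materialize the file when the command actually carries this
    # config type; otherwise the existing on-disk copy is left alone
    # (mapreduce.py goes one step further and merely fixes its ownership).
    if key in params.config['configurations']:
      XmlConfig(name,
                conf_dir=params.hadoop_conf_dir,
                configurations=params.config['configurations'][key],
                owner=params.hdfs_user,
                group=params.user_group
      )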
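On the task-controller resource now living in mapreduce.py: mode 06050 is
the usual LinuxTaskController permission set, setuid (04000) plus setgid
(02000) plus group read/execute (050) and nothing for owner or other, so
the binary can escalate to root while only members of mapred_tt_group may
invoke it. That is also why taskcontroller.cfg flips to root-owned mode
0644 when security_enabled is true, but stays a plain file owned by the
hdfs user on non-secure clusters.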
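setup_java(), relocated into the before-INSTALL hook, leans on guarded
Execute resources for idempotency: every shell step carries a
not_if/only_if test, so re-running the hook after a successful install is
a no-op. A minimal sketch of the idiom, assuming resource_management's
Execute and format() (which resolves {names} from the caller's scope, as
the java_exec guard above demonstrates); the function and its arguments
are illustrative:

  from resource_management import *

  def fetch_once(url, target):
    # Download target exactly once; the guard makes retries harmless.
    Execute(format("curl -kf --retry 10 {url} -o {target}"),
            path=["/bin", "/usr/bin/"],
            not_if=format("test -e {target}"))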
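The one-line change in before-RESTART/scripts/hook.py is easy to miss but
load-bearing: with the hook directories split into before-/after-
variants, run_custom_hook appears to need the fully prefixed name
('before-START'), since the agent derives the hook script path from that
string; the bare 'START' would presumably no longer resolve to a hook
directory under the new layout.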
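Finally, the new params entries show the house style for optional values:
default() takes a /-separated path into the command JSON plus a fallback,
so a missing key degrades gracefully instead of raising. The two lines
from MAPREDUCE/package/scripts/params.py, with comments added (user_group
is defined earlier in that same file):

  from resource_management import *

  # Fall back to the general user_group when mapred-site does not
  # override the tasktracker group.
  mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)

  # An empty list keeps the slaves.j2 for-loop valid on clusters that
  # have not reported slave hosts yet.
  slave_hosts = default("/clusterHostInfo/slave_hosts", [])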