ambari-commits mailing list archives

From dmitriu...@apache.org
Subject [14/21] ambari git commit: AMBARI-9068. Remove HDP 1.3 stack defn from Ambari. (dlysnichenko)
Date Mon, 12 Jan 2015 11:53:09 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveSmoke.sh
deleted file mode 100644
index 7e03524..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveSmoke.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-export tablename=$1
-echo "CREATE EXTERNAL TABLE IF NOT EXISTS ${tablename} ( foo INT, bar STRING );" | hive
-echo "DESCRIBE ${tablename};" | hive

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveserver2.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveserver2.sql b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveserver2.sql
deleted file mode 100644
index 99a3865..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveserver2.sql
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-CREATE EXTERNAL TABLE IF NOT EXISTS hiveserver2smoke20408 ( foo INT, bar STRING );
-DESCRIBE hiveserver2smoke20408;
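
For reference, this SQL file was the script that hiveserver2Smoke.sh (next
diff) passed to beeline as its second argument and executed via the
'!run $2' command; the hiveserver2smoke20408 table name was hard-coded.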

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveserver2Smoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveserver2Smoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveserver2Smoke.sh
deleted file mode 100644
index 051a21e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/hiveserver2Smoke.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-smokeout=`/usr/lib/hive/bin/beeline -u $1 -n fakeuser -p fakepwd -d org.apache.hive.jdbc.HiveDriver -e '!run $2' 2>&1| awk '{print}'|grep Error`
-
-if [ "x$smokeout" == "x" ]; then
-  echo "Smoke test of hiveserver2 passed"
-  exit 0
-else
-  echo "Smoke test of hiveserver2 wasnt passed"
-  echo $smokeout
-  exit 1
-fi
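
For reference, hiveserver2Smoke.sh took a JDBC URL ($1) and a SQL script path
($2) and failed the smoke test if the beeline output contained "Error". Note
that '!run $2' sits inside single quotes, so the shell would not expand $2
there. A hypothetical manual run, using the URL shape and script path that
params.py (later in this commit) wired up:

    sh hiveserver2Smoke.sh 'jdbc:hive2://<hiveserver2-host>:10000' /tmp/hiveserver2.sql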

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/pigSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/pigSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/pigSmoke.sh
deleted file mode 100644
index 2e90ac0..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/pigSmoke.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-A = load 'passwd' using PigStorage(':');
-B = foreach A generate \$0 as id;
-store B into 'pigsmoke.out';
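
Despite the .sh extension, pigSmoke.sh contained Pig Latin, not shell: it
loaded 'passwd' from the submitting user's HDFS home directory and stored the
first field of each row into 'pigsmoke.out'. A hypothetical direct run
through the pig client would be:

    pig /tmp/pigSmoke.sh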

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/startHiveserver2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/startHiveserver2.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/startHiveserver2.sh
deleted file mode 100644
index e02b751..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/startHiveserver2.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-HIVE_CONF_DIR=$4 /usr/lib/hive/bin/hiveserver2 -hiveconf hive.metastore.uris=' ' -hiveconf hive.log.file=hiveserver2.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
-echo $!|cat>$3
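
For reference, hive_service.py (later in this commit) invoked this script
with five positional arguments: $1 stdout log, $2 stderr log, $3 pid file,
$4 HIVE_CONF_DIR, $5 hive.log.dir. A hypothetical manual start, with
illustrative paths, would be:

    sh startHiveserver2.sh /var/log/hive/hive-server2.out \
        /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid \
        /etc/hive/conf.server /var/log/hive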

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/startMetastore.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/startMetastore.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/startMetastore.sh
deleted file mode 100644
index cdb6a77..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/startMetastore.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-HIVE_CONF_DIR=$4 hive --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
-echo $!|cat>$3
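
For reference, hive_service.py invoked this script with the same five
positional arguments as startHiveserver2.sh ($1 stdout log, $2 stderr log,
$3 pid file, $4 HIVE_CONF_DIR, $5 hive.log.dir), plus HADOOP_HOME and
JAVA_HOME in the environment. A hypothetical manual start, paths illustrative:

    env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/current sh startMetastore.sh \
        /var/log/hive/hive.out /var/log/hive/hive.log \
        /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive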

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
deleted file mode 100644
index 662142d..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export ttonhost=$1
-export smoke_test_user=$2
-export smoke_user_keytab=$3
-export security_enabled=$4
-export kinit_path_local=$5
-export ttonurl="http://${ttonhost}:50111/templeton/v1"
-
-if [[ $security_enabled == "true" ]]; then
-  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
-else
-  kinitcmd=""
-fi
-
-export no_proxy=$ttonhost
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
-retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0
-
-#try hcat ddl command
-echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
-retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit  1
-fi
-
-# NOT SURE?? SUHAS
-if [[ $security_enabled == "true" ]]; then
-  echo "Templeton Pig Smoke Tests not run in secure mode"
-  exit 0
-fi
-
-#try pig query
-outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
-ttonTestOutput="/tmp/idtest.${outname}.out";
-ttonTestInput="/tmp/idtest.${outname}.in";
-ttonTestScript="idtest.${outname}.pig"
-
-echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
-echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
-echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
-
-#copy pig script to hdfs
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
-
-#copy input file to hdfs
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
-
-#create, copy post args file
-echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
-
-#submit pig query
-cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
-retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0
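
Two notes on templetonSmoke.sh: it took five positional arguments (WebHCat
host, smoke user, smoke keytab, security flag, kinit path), and the
unconditional 'exit 0' after the status check meant the ddl and pig sections
below it were unreachable dead code. A hypothetical invocation with
illustrative values:

    sh templetonSmoke.sh webhcat-host.example.com ambari-qa \
        /etc/security/keytabs/smokeuser.headless.keytab false /usr/bin/kinit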

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/__init__.py
deleted file mode 100644
index 5561e10..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat.py
deleted file mode 100644
index 3865dc0..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-
-
-def hcat():
-  import params
-
-  Directory(params.hcat_conf_dir,
-            owner=params.hcat_user,
-            group=params.user_group,
-  )
-
-  Directory(params.hcat_pid_dir,
-            owner=params.webhcat_user,
-            recursive=True
-  )
-
-  XmlConfig("hive-site.xml",
-            conf_dir=params.hive_conf_dir,
-            configurations=params.config['configurations']['hive-site'],
-            configuration_attributes=params.config['configuration_attributes']['hive-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0644)
-
-  File(format("{hcat_conf_dir}/hcat-env.sh"),
-       owner=params.hcat_user,
-       group=params.user_group,
-       content=InlineTemplate(params.hcat_env_sh_template)
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_client.py
deleted file mode 100644
index 54a8937..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_client.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from hcat import hcat
-
-class HCatClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hcat()
-
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-
-if __name__ == "__main__":
-  HCatClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
deleted file mode 100644
index d2100f9..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from resource_management.libraries.functions import get_unique_id_and_date
-
-def hcat_service_check():
-    import params
-
-    unique = get_unique_id_and_date()
-    output_file = format("/apps/hive/warehouse/hcatsmoke{unique}")
-    test_cmd = format("fs -test -e {output_file}")
-
-    if params.security_enabled:
-      kinit_cmd = format(
-        "{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
-    else:
-      kinit_cmd = ""
-
-    File(format("{tmp_dir}/hcatSmoke.sh"),
-         content=StaticFile("hcatSmoke.sh"),
-         mode=0755
-    )
-
-    prepare_cmd = format("{kinit_cmd}sh {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} prepare")
-
-    Execute(prepare_cmd,
-            tries=3,
-            user=params.smokeuser,
-            try_sleep=5,
-            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
-            logoutput=True)
-
-    if params.security_enabled:
-      ExecuteHadoop(test_cmd,
-                    user=params.hdfs_user,
-                    logoutput=True,
-                    conf_dir=params.hadoop_conf_dir,
-                    security_enabled=params.security_enabled,
-                    kinit_path_local=params.kinit_path_local,
-                    keytab=params.hdfs_user_keytab,
-                    principal=params.hdfs_principal_name
-      )
-    else:
-      ExecuteHadoop(test_cmd,
-                    user=params.hdfs_user,
-                    logoutput=True,
-                    conf_dir=params.hadoop_conf_dir,
-                    security_enabled=params.security_enabled,
-                    kinit_path_local=params.kinit_path_local,
-                    keytab=params.hdfs_user_keytab
-      )
-
-    cleanup_cmd = format("{kinit_cmd}sh {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
-
-    Execute(cleanup_cmd,
-            tries=3,
-            user=params.smokeuser,
-            try_sleep=5,
-            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
-            logoutput=True
-    )
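
For reference, the prepare/cleanup commands above shelled out to an
hcatSmoke.sh helper (not shown in this diff), so the check expanded to
roughly the following, with <id> produced by get_unique_id_and_date():

    sh /tmp/hcatSmoke.sh hcatsmoke<id> prepare
    hadoop fs -test -e /apps/hive/warehouse/hcatsmoke<id>
    sh /tmp/hcatSmoke.sh hcatsmoke<id> cleanup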

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
deleted file mode 100644
index 19ead7b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import os
-
-from resource_management import *
-import sys
-
-
-def hive(name=None):
-  import params
-
-
-  if name == "hiveserver2":
-    params.HdfsDirectory(params.hive_apps_whs_dir,
-                   action="create_delayed",
-                   owner=params.hive_user,
-                   mode=0777
-    )
-    params.HdfsDirectory(params.hive_hdfs_user_dir,
-                   action="create_delayed",
-                   owner=params.hive_user,
-                   mode=params.hive_hdfs_user_mode
-    )
-    params.HdfsDirectory(None, action="create")
-  if name == 'metastore' or name == 'hiveserver2':
-    jdbc_connector()
-
-  Directory(params.hive_conf_dir,
-            owner=params.hive_user,
-            group=params.user_group,
-            recursive=True
-  )
-  Directory(params.hive_server_conf_dir,
-            owner=params.hive_user,
-            group=params.user_group,
-            recursive=True
-  )
-
-
-  environment = {
-    "no_proxy": format("{ambari_server_hostname}")
-  }
-
-  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" "
-               "--retry 5 "
-               "{jdk_location}{check_db_connection_jar_name} "
-               "-o {check_db_connection_jar_name}'")
-
-  Execute(cmd,
-          not_if=format("[ -f {check_db_connection_jar} ]"),
-          environment = environment)
-
-  if name == 'metastore':
-    File(params.start_metastore_path,
-         mode=0755,
-         content=StaticFile('startMetastore.sh')
-    )
-
-  elif name == 'hiveserver2':
-    File(params.start_hiveserver2_path,
-         mode=0755,
-         content=StaticFile('startHiveserver2.sh')
-    )
-
-  if name != "client":
-    crt_directory(params.hive_pid_dir)
-    crt_directory(params.hive_log_dir)
-    crt_directory(params.hive_var_lib)
-    # Setting mode for hive-site
-    hive_site_mode = 0600
-  else:
-    # Setting mode for hive-site
-    hive_site_mode = 0644
-
-  XmlConfig("hive-site.xml",
-            conf_dir=params.hive_config_dir,
-            configurations=params.config['configurations']['hive-site'],
-            configuration_attributes=params.config['configuration_attributes']['hive-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=hive_site_mode
-  )
-  File(format("{hive_config_dir}/hive-env.sh"),
-       owner=params.hive_user,
-       group=params.user_group,
-       content=InlineTemplate(params.hive_env_sh_template)
-  )
-
-  crt_file(format("{hive_conf_dir}/hive-default.xml.template"))
-  crt_file(format("{hive_conf_dir}/hive-env.sh.template"))
-
-  log4j_exec_filename = 'hive-exec-log4j.properties'
-  if (params.log4j_exec_props != None):
-    File(format("{params.hive_conf_dir}/{log4j_exec_filename}"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=params.log4j_exec_props
-    )
-  elif (os.path.exists("{params.hive_conf_dir}/{log4j_exec_filename}.template")):
-    File(format("{params.hive_conf_dir}/{log4j_exec_filename}"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=StaticFile(format("{params.hive_conf_dir}/{log4j_exec_filename}.template"))
-    )
-
-  log4j_filename = 'hive-log4j.properties'
-  if (params.log4j_props != None):
-    File(format("{params.hive_conf_dir}/{log4j_filename}"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=params.log4j_props
-    )
-  elif (os.path.exists("{params.hive_conf_dir}/{log4j_filename}.template")):
-    File(format("{params.hive_conf_dir}/{log4j_filename}"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=StaticFile(format("{params.hive_conf_dir}/{log4j_filename}.template"))
-    )
-
-
-def crt_directory(name):
-  import params
-
-  Directory(name,
-            recursive=True,
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0755)
-
-
-def crt_file(name):
-  import params
-
-  File(name,
-       owner=params.hive_user,
-       group=params.user_group
-  )
-
-
-def jdbc_connector():
-  import params
-
-  if params.hive_jdbc_driver in params.hive_jdbc_drivers_list and params.hive_use_existing_db:
-    environment = {
-      "no_proxy": format("{ambari_server_hostname}")
-    }
-
-    # TODO: should be removed after ranger_hive_plugin will not provide jdbc
-    Execute(('rm', '-f', params.prepackaged_ojdbc_symlink),
-            path=["/bin", "/usr/bin/"],
-            sudo = True)
-
-    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o',
-             params.downloaded_custom_connector),
-            not_if=format("test -f {downloaded_custom_connector}"),
-            path=["/bin", "/usr/bin/"],
-            environment=environment,
-            sudo = True)
-
-
-    Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.target),
-            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
-            path=["/bin", "/usr/bin/"],
-            sudo = True)
-
-  else:
-    #for default hive db (Mysql)
-    Execute(('cp', '--remove-destination', format('/usr/share/java/{jdbc_jar_name}'), params.target),
-            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
-            path=["/bin", "/usr/bin/"],
-            sudo=True
-    )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_client.py
deleted file mode 100644
index 0a5fb2b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_client.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import sys
-from resource_management import *
-
-from hive import hive
-
-class HiveClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='client')
-
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_metastore.py
deleted file mode 100644
index c741174..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_metastore.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from hive import hive
-from hive_service import hive_service
-
-class HiveMetastore(Script):
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='metastore')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    hive_service( 'metastore',
-                   action = 'start'
-    )
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    hive_service( 'metastore',
-                   action = 'stop'
-    )
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
-    # Recursively check all existing gmetad pid files
-    check_process_status(pid_file)
-
-if __name__ == "__main__":
-  HiveMetastore().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py
deleted file mode 100644
index b05649c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from hive import hive
-from hive_service import hive_service
-
-class HiveServer(Script):
-
-  def install(self, env):
-    self.install_packages(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='hiveserver2')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    hive_service( 'hiveserver2',
-                  action = 'start'
-    )
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    hive_service( 'hiveserver2',
-                  action = 'stop'
-    )
-
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_pid}")
-    # Recursively check all existing gmetad pid files
-    check_process_status(pid_file)
-
-if __name__ == "__main__":
-  HiveServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py
deleted file mode 100644
index c68391a..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-import time
-
-def hive_service(
-    name,
-    action='start'):
-
-  import params
-
-  if name == 'metastore':
-    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
-    cmd = format(
-      "env HADOOP_HOME={hadoop_home} JAVA_HOME={java64_home} {start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.log {pid_file} {hive_server_conf_dir} {hive_log_dir}")
-  elif name == 'hiveserver2':
-    pid_file = format("{hive_pid_dir}/{hive_pid}")
-    cmd = format(
-      "env JAVA_HOME={java64_home} {start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.log {pid_file} {hive_server_conf_dir} {hive_log_dir}")
-
-  process_id_exists = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
-  
-  if action == 'start':
-    demon_cmd = format("{cmd}")
-    
-    Execute(demon_cmd,
-            user=params.hive_user,
-            not_if=process_id_exists
-    )
-
-    if params.hive_jdbc_driver == "com.mysql.jdbc.Driver" or \
-       params.hive_jdbc_driver == "org.postgresql.Driver" or \
-       params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
-      
-      db_connection_check_command = format(
-        "{java64_home}/bin/java -cp {check_db_connection_jar}:/usr/share/java/{jdbc_jar_name} org.apache.ambari.server.DBConnectionVerification '{hive_jdbc_connection_url}' {hive_metastore_user_name} {hive_metastore_user_passwd!p} {hive_jdbc_driver}")
-      
-      Execute(db_connection_check_command,
-              path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin', tries=5, try_sleep=10)
-      
-    # AMBARI-5800 - wait for the server to come up instead of just the PID existance
-    if name == 'hiveserver2':
-      SOCKET_WAIT_SECONDS = 120
-      address=params.hive_server_host
-      port=int(params.hive_server_port)
-      
-      start_time = time.time()
-      end_time = start_time + SOCKET_WAIT_SECONDS
-
-      is_service_socket_valid = False
-      print "Waiting for the Hive server to start..."
-      if params.security_enabled:
-        kinitcmd=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
-      else:
-        kinitcmd=None
-      while time.time() < end_time:
-        try:
-          check_thrift_port_sasl(address, port, params.hive_server2_authentication,
-                                 params.hive_server_principal, kinitcmd, params.smokeuser)
-          is_service_socket_valid = True
-          break
-        except:
-          time.sleep(5)
-
-      elapsed_time = time.time() - start_time
-      
-      if is_service_socket_valid == False: 
-        raise Fail("Connection to Hive server %s on port %s failed after %d seconds" % (address, port, elapsedwebhcat_service.py_time))
-      
-      print "Successfully connected to Hive at %s on port %s after %d seconds" % (address, port, elapsed_time)    
-            
-  elif action == 'stop':
-    demon_cmd = format("sudo kill `cat {pid_file}`")
-    Execute(demon_cmd,
-         not_if = format("! ({process_id_exists})")
-    )
-    File(pid_file,
-         action = "delete",
-    )
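
For reference, after substitution the hiveserver2 start command assembled
above expanded to roughly the following (paths illustrative, taken from the
defaults in params.py and status_params.py later in this commit):

    env JAVA_HOME=<java64_home> /tmp/start_hiveserver2_script \
        /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log \
        /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive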

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_server.py
deleted file mode 100644
index 1712964..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_server.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-import os
-from resource_management import *
-
-from mysql_service import mysql_service
-
-class MysqlServer(Script):
-
-  def install(self, env):
-    import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    mysql_service(daemon_name=params.daemon_name, action='start')
-
-    File(params.mysql_adduser_path,
-         mode=0755,
-         content=StaticFile('addMysqlUser.sh')
-    )
-
-    cmd = format("bash -x {mysql_adduser_path} {daemon_name} {hive_metastore_user_name} {hive_metastore_user_passwd!p} {mysql_host[0]}")
-
-    Execute(cmd,
-            tries=3,
-            try_sleep=5,
-            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
-    )
-
-    mysql_service(daemon_name=params.daemon_name, action='stop')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-
-    mysql_service(daemon_name=params.daemon_name, action = 'start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    mysql_service(daemon_name=params.daemon_name, action = 'stop')
-
-  def status(self, env):
-    import status_params
-    mysql_service(daemon_name=status_params.daemon_name, action = 'status')
-
-if __name__ == "__main__":
-  MysqlServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py
deleted file mode 100644
index fa06712..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-
-def mysql_service(daemon_name=None, action='start'):
-  cmd = ('service', daemon_name, action)
-
-  if action == 'status':
-    Execute(cmd,
-            path="/usr/local/bin/:/bin/:/sbin/",
-            logoutput=False,
-            sudo=True,
-    )
-  else:
-    Execute(cmd,
-            path="/usr/local/bin/:/bin/:/sbin/",
-            logoutput=True,
-            sudo=True,
-    )
-
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
deleted file mode 100644
index 7024a49..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
+++ /dev/null
@@ -1,220 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import status_params
-import os
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
-hive_server_conf_dir = "/etc/hive/conf.server"
-hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
-
-hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
-
-#users
-hive_user = config['configurations']['hive-env']['hive_user']
-hive_lib = '/usr/lib/hive/lib/'
-#JDBC driver jar name
-hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
-if hive_jdbc_driver == "com.mysql.jdbc.Driver":
-  jdbc_jar_name = "mysql-connector-java.jar"
-  jdbc_symlink_name = "mysql-jdbc-driver.jar"
-elif hive_jdbc_driver == "org.postgresql.Driver":
-  jdbc_jar_name = "postgresql-jdbc.jar"
-  jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
-  jdbc_jar_name = "ojdbc.jar"
-  jdbc_symlink_name = "oracle-jdbc-driver.jar"
-
-check_db_connection_jar_name = "DBConnectionVerification.jar"
-check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-hive_jdbc_drivers_list = ["com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
-downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
-prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
-
-#common
-hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
-hive_var_lib = '/var/lib/hive'
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
-hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
-hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
-hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
-hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
-
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
-smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
-hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
-
-#hive_env
-hive_conf_dir = "/etc/hive/conf"
-hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
-hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
-hive_pid_dir = status_params.hive_pid_dir
-hive_pid = status_params.hive_pid
-#Default conf dir for client
-hive_config_dir = hive_conf_dir
-if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
-  hive_config_dir = hive_server_conf_dir
-
-#hive-site
-hive_database_name = config['configurations']['hive-env']['hive_database_name']
-hive_database = config['configurations']['hive-env']['hive_database']
-hive_use_existing_db = hive_database.startswith('Existing')
-
-
-mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
-hive_exclude_packages = []
-
-if hive_use_existing_db:
-  hive_exclude_packages = ['mysql-connector-java','mysql','mysql-server']
-else:
-  if 'role' in config and config['role'] != "MYSQL_SERVER":
-    hive_exclude_packages = ['mysql','mysql-server']
-  if os.path.exists(mysql_jdbc_driver_jar):
-    hive_exclude_packages.append('mysql-connector-java')
-
-#Starting hiveserver2
-start_hiveserver2_script = 'startHiveserver2.sh'
-
-hadoop_home = '/usr'
-
-##Starting metastore
-start_metastore_script = 'startMetastore.sh'
-hive_metastore_pid = status_params.hive_metastore_pid
-java_share_dir = '/usr/share/java'
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
-
-hdfs_user =  config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
-artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
-
-target = format("{hive_lib}/{jdbc_jar_name}")
-
-jdk_location = config['hostLevelParams']['jdk_location']
-driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-
-start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
-start_metastore_path = format("{tmp_dir}/start_metastore_script")
-
-hive_aux_jars_path = config['configurations']['hive-env']['hive_aux_jars_path']
-hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
-hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
-java64_home = config['hostLevelParams']['java_home']
-hive_env_sh_template = config['configurations']['hive-env']['content']
-
-##### MYSQL
-
-db_name = config['configurations']['hive-env']['hive_database_name']
-mysql_user = "mysql"
-mysql_group = 'mysql'
-mysql_host = config['clusterHostInfo']['hive_mysql_host']
-
-mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
-
-########## HCAT
-
-hcat_conf_dir = '/etc/hcatalog/conf'
-
-hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
-
-hcat_dbroot = hcat_lib
-
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-
-hcat_pid_dir = status_params.hcat_pid_dir
-hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']   #hcat_log_dir
-
-hadoop_conf_dir = '/etc/hadoop/conf'
-
-hcat_env_sh_template = config['configurations']['hcat-env']['content']
-
-#hive-log4j.properties.template
-if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
-  log4j_props = config['configurations']['hive-log4j']['content']
-else:
-  log4j_props = None
-
-#hive-exec-log4j.properties.template
-if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])):
-  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
-else:
-  log4j_exec_props = None
-  
-daemon_name = status_params.daemon_name
-
-#hdfs directories
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
-hive_hdfs_user_dir = format("/user/{hive_user}")
-hive_hdfs_user_mode = 0700
-#for create_hdfs_directory
-hostname = config["hostname"]
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-
-#################################################
-################## WebHCat ######################
-#################################################
-webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
-
-config_dir = '/etc/hcatalog/conf'
-
-templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-templeton_pid_dir = status_params.templeton_pid_dir
-
-webhcat_pid_file = status_params.webhcat_pid_file
-
-templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
-
-
-webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
-
-webhcat_apps_dir = "/apps/webhcat"
-
-#hdfs directories
-hcat_hdfs_user_dir = format("/user/{hcat_user}")
-hcat_hdfs_user_mode = 0755
-webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-webhcat_hdfs_user_mode = 0755
-#for create_hdfs_directory
-security_param = "true" if security_enabled else "false"
-
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
-)

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
deleted file mode 100644
index 945a676..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import socket
-import sys
-
-from hcat_service_check import hcat_service_check
-from webhcat_service_check import webhcat_service_check
-
-class HiveServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    address=format("{hive_server_host}")
-    port=int(format("{hive_server_port}"))
-    print "Test connectivity to hive server"
-    if params.security_enabled:
-      kinitcmd=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
-    else:
-      kinitcmd=None
-
-    try:
-      check_thrift_port_sasl(address, port, params.hive_server2_authentication,
-                             params.hive_server_principal, kinitcmd, params.smokeuser)
-      print "Successfully connected to %s on port %s" % (address, port)
-    except Exception:
-      print "Connection to %s on port %s failed" % (address, port)
-      sys.exit(1)
-
-    hcat_service_check()
-    webhcat_service_check()
-
-if __name__ == "__main__":
-  HiveServiceCheck().execute()
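
[Editor's note: check_thrift_port_sasl above performs a SASL-aware Thrift handshake against HiveServer2. The basic reachability idea behind it can be sketched with a plain TCP probe; this is a simplification, not the helper's actual logic:

import socket

def port_is_open(host, port, timeout=5.0):
  # Plain TCP connect: succeeds if something is listening; no protocol check.
  s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  s.settimeout(timeout)
  try:
    s.connect((host, port))
    return True
  except socket.error:
    return False
  finally:
    s.close()

print port_is_open('localhost', 10000)  # 10000 is HiveServer2's usual Thrift port
]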

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
deleted file mode 100644
index 7c1af00..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
-hive_pid = 'hive-server.pid'
-
-hive_metastore_pid = 'hive.pid'
-
-hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
-
-templeton_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
-webhcat_pid_file = format('{templeton_pid_dir}/webhcat.pid')
-
-if System.get_instance().os_family == "suse":
-  daemon_name = 'mysql'
-else:
-  daemon_name = 'mysqld'
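
[Editor's note: status() handlers built on the pid paths above typically read the pid file and probe the process. A rough approximation of that liveness check (not the exact code of check_process_status):

import errno
import os

def process_alive(pid_file):
  try:
    with open(pid_file) as f:
      pid = int(f.read().strip())
  except (IOError, ValueError):
    return False  # no pid file, or unparsable contents
  try:
    os.kill(pid, 0)  # signal 0 checks existence without disturbing the process
    return True
  except OSError as e:
    return e.errno == errno.EPERM  # alive, but owned by another user
]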

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
deleted file mode 100644
index 037cdb5..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
+++ /dev/null
@@ -1,107 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-
-def webhcat():
-  import params
-
-  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-    params.HdfsDirectory(params.hcat_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hcat_user,
-                         mode=params.hcat_hdfs_user_mode
-    )
-  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=params.webhcat_hdfs_user_mode
-  )
-  params.HdfsDirectory(params.webhcat_apps_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=0755
-  )
-  params.HdfsDirectory(None, action="create")
-  Directory(params.templeton_pid_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.templeton_log_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.config_dir,
-            owner=params.webhcat_user,
-            group=params.user_group)
-
-  XmlConfig("webhcat-site.xml",
-            conf_dir=params.config_dir,
-            configurations=params.config['configurations']['webhcat-site'],
-            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
-            owner=params.webhcat_user,
-            group=params.user_group,
-  )
-
-  File(format("{config_dir}/webhcat-env.sh"),
-       owner=params.webhcat_user,
-       group=params.user_group,
-       content=InlineTemplate(params.webhcat_env_sh_template)
-  )
-
-  if params.security_enabled:
-    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user};")
-  else:
-    kinit_if_needed = ""
-
-  if kinit_if_needed:
-    Execute(kinit_if_needed,
-            user=params.webhcat_user,
-            path='/bin'
-    )
-
-  CopyFromLocal('/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
-  )
-
-  CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
-  )
-
-  CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
-  )
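
[Editor's note: the create_delayed/create pairing above queues several HDFS directories and flushes them in one action, saving round trips to hadoop fs. A toy illustration of the batching idea, with a hypothetical class in place of the real resource:

class BatchedDirs(object):
  def __init__(self):
    self.pending = []

  def add(self, path, owner, mode):
    # Analogue of action="create_delayed": queue the work, don't execute yet.
    self.pending.append((path, owner, mode))

  def flush(self):
    # Analogue of the final action="create": apply everything in one pass.
    for path, owner, mode in self.pending:
      print "hadoop fs -mkdir %s  # owner=%s mode=%o" % (path, owner, mode)
    self.pending = []

dirs = BatchedDirs()
dirs.add('/user/hcat', 'hcat', 0755)
dirs.add('/apps/webhcat', 'hcat', 0755)
dirs.flush()
]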

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
deleted file mode 100644
index 0794ea1..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
+++ /dev/null
@@ -1,54 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import sys
-from resource_management import *
-
-from webhcat import webhcat
-from webhcat_service import webhcat_service
-
-class WebHCatServer(Script):
-  def install(self, env):
-    import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    webhcat()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    webhcat_service(action = 'start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    webhcat_service(action = 'stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.webhcat_pid_file)
-
-if __name__ == "__main__":
-  WebHCatServer().execute()
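
[Editor's note: start() re-runs configure() before launching the daemon (the "# FOR SECURITY" comment above): configuration, including kerberos-related files, may have changed since the last start. The lifecycle shape, reduced to its essentials in a hypothetical sketch:

class Lifecycle(object):
  def configure(self):
    print "re-render configs (picks up e.g. a kerberos toggle)"

  def start(self):
    self.configure()  # always refresh configs before starting
    print "launch daemon"

  def stop(self):
    print "stop daemon"

Lifecycle().start()
]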

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
deleted file mode 100644
index fbad9fa..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-def webhcat_service(action='start'):
-  import params
-
-  cmd = format('env HADOOP_HOME={hadoop_home} /usr/lib/hcatalog/sbin/webhcat_server.sh')
-
-  if action == 'start':
-    daemon_cmd = format('{cmd} start')
-    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1')
-    Execute(daemon_cmd,
-            user=params.webhcat_user,
-            not_if=no_op_test
-    )
-  elif action == 'stop':
-    daemon_cmd = format('{cmd} stop')
-    Execute(daemon_cmd,
-            user=params.webhcat_user
-    )
-    File(params.webhcat_pid_file,
-         action="delete",
-    )
\ No newline at end of file
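
[Editor's note: the not_if guard above makes the start action idempotent; the start command is skipped when the pid file already points at a live process. The same pattern outside resource_management might look like this (a sketch; the guard expression mirrors the one in the code above):

import subprocess

def start_if_not_running(start_cmd, pid_file):
  guard = 'ls %s >/dev/null 2>&1 && ps -p `cat %s` >/dev/null 2>&1' % (
      pid_file, pid_file)
  if subprocess.call(['bash', '-c', guard]) == 0:
    return  # pid file exists and the process is alive: nothing to do
  subprocess.check_call(['bash', '-c', start_cmd])
]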

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
deleted file mode 100644
index 1352e0b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-def webhcat_service_check():
-  import params
-  File(format("{tmp_dir}/templetonSmoke.sh"),
-       content= StaticFile('templetonSmoke.sh'),
-       mode=0755
-  )
-
-  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
-               " {security_param} {kinit_path_local}",
-               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
-
-  Execute(cmd,
-          tries=3,
-          try_sleep=5,
-          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-          logoutput=True)
-
-
-
-
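
[Editor's note: Execute(tries=3, try_sleep=5) above retries a flaky smoke test before failing. The retry loop, approximated outside the framework with a hypothetical helper:

import subprocess
import time

def run_with_retries(cmd, tries=3, try_sleep=5):
  for attempt in range(1, tries + 1):
    if subprocess.call(cmd, shell=True) == 0:
      return  # success
    if attempt < tries:
      time.sleep(try_sleep)  # back off before the next attempt
  raise RuntimeError('failed after %d tries: %s' % (tries, cmd))
]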

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/alerts.json b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/alerts.json
deleted file mode 100644
index d068bc0..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/alerts.json
+++ /dev/null
@@ -1,221 +0,0 @@
-{
-  "MAPREDUCE":{
-    "service": [
-      {
-        "name": "mapreduce_tasktracker_process_percent",
-        "label": "Percent TaskTrackers Available",
-        "description": "This alert is triggered if the number of down TaskTrackers in the cluster is greater than the configured critical threshold. It aggregates the results of TaskTrackers process checks.",
-        "interval": 1,
-        "scope": "SERVICE",
-        "enabled": true,
-        "source": {
-          "type": "AGGREGATE",
-          "alert_name": "mapreduce_tasktracker_process",
-          "reporting": {
-            "ok": {
-              "text": "affected: [{1}], total: [{0}]"
-            },
-            "warning": {
-              "text": "affected: [{1}], total: [{0}]",
-              "value": 0.1
-            },
-            "critical": {
-              "text": "affected: [{1}], total: [{0}]",
-              "value": 0.3
-            }
-          }
-        }
-      }
-    ],
-    "JOBTRACKER": [
-      {
-        "name": "mapreduce_jobtracker_webui",
-        "label": "JobTracker Web UI",
-        "description": "This host-level alert is triggered if the JobTracker Web UI is unreachable.",
-        "interval": 1,
-        "scope": "ANY",
-        "enabled": true,
-        "source": {
-          "type": "WEB",
-          "uri": {
-            "http": "{{mapred-site/mapred.job.tracker.http.address}}"
-          },
-          "reporting": {
-            "ok": {
-              "text": "HTTP {0} response in {2:.3f} seconds"
-            },
-            "warning":{
-              "text": "HTTP {0} response in {2:.3f} seconds"
-            },
-            "critical": {
-              "text": "Connection failed to {1}"
-            }
-          }
-        }
-      },
-      {
-        "name": "mapreduce_jobtracker_cpu",
-        "label": "JobTracker Host CPU Utilization",
-        "description": "This host-level alert is triggered if the percent of CPU utilization on the JobTracker exceeds the configured critical threshold. The threshold values are in percent.",
-        "interval": 5,
-        "scope": "ANY",
-        "enabled": true,
-        "source": {
-          "type": "METRIC",
-          "uri": {
-            "http": "{{mapred-site/mapred.job.tracker.http.address}}"
-          },
-          "reporting": {
-            "ok": {
-              "text": "{1} CPU, load {0:.1%}"
-            },
-            "warning": {
-              "text": "{1} CPU, load {0:.1%}",
-              "value": 200
-            },
-            "critical": {
-              "text": "{1} CPU, load {0:.1%}",
-              "value": 250
-            },
-            "units" : "%"
-          },
-          "jmx": {
-            "property_list": [
-              "java.lang:type=OperatingSystem/SystemCpuLoad",
-              "java.lang:type=OperatingSystem/AvailableProcessors"
-            ],
-            "value": "{0} * 100"
-          }
-        }
-      },
-      {
-        "name": "mapreduce_jobtracker_rpc_latency",
-        "label": "JobTracker RPC Latency",
-        "description": "This host-level alert is triggered if the JobTracker operations RPC latency exceeds the configured critical threshold. Typically an increase in the RPC processing time increases the RPC queue length, causing the average queue wait time to increase for operations. The threshold values are in milliseconds.",
-        "interval": 2,
-        "scope": "ANY",
-        "enabled": true,
-        "source": {
-          "type": "METRIC",
-          "uri": {
-            "http": "{{mapred-site/mapred.job.tracker.http.address}}"
-          },
-          "reporting": {
-            "ok": {
-              "text": "Average Queue Time:[{0}], Average Processing Time:[{1}]"
-            },
-            "warning": {
-              "text": "Average Queue Time:[{0}], Average Processing Time:[{1}]",
-              "value": 3000
-            },          
-            "critical": {
-              "text": "Average Queue Time:[{0}], Average Processing Time:[{1}]",
-              "value": 5000
-            },
-            "units" : "ms"
-          },
-          "jmx": {
-            "property_list": [
-              "Hadoop:service=JobTracker,name=RpcActivityForPort*/RpcQueueTime_avg_time",
-              "Hadoop:service=JobTracker,name=RpcActivityForPort*/RpcProcessingTime_avg_time"
-            ],
-            "value": "{0}"
-          }
-        }
-      },
-      {
-        "name": "mapreduce_jobtracker_process",
-        "label": "JobTracker Process",
-        "description": "This host-level alert is triggered if the JobTracker process cannot be established to be up and listening on the network.",
-        "interval": 1,
-        "scope": "ANY",
-        "enabled": true,
-        "source": {
-          "type": "PORT",
-          "uri": "{{mapred-site/mapred.job.tracker.http.address}}",
-          "default_port": 50030,
-          "reporting": {
-            "ok": {
-              "text": "TCP OK - {0:.3f}s response on port {1}"
-            },
-            "warning": {
-              "text": "TCP OK - {0:.3f}s response on port {1}",
-              "value": 1.5
-            },
-            "critical": {
-              "text": "Connection failed: {0} to {1}:{2}",
-              "value": 5.0
-            }
-          }
-        }
-      }
-    ],
-    "TASKTRACKER": [
-      {
-        "name": "mapreduce_tasktracker_process",
-        "label": "TaskTracker Process",
-        "description": "This host-level alert is triggered if the TaskTracker process cannot be established to be up and listening on the network.",
-        "interval": 1,
-        "scope": "HOST",
-        "enabled": true,
-        "source": {
-          "type": "PORT",
-          "uri": "50060",
-          "default_port": 50060,
-          "reporting": {
-            "ok": {
-              "text": "TCP OK - {0:.3f}s response on port {1}"
-            },
-            "warning": {
-              "text": "TCP OK - {0:.3f}s response on port {1}",
-              "value": 1.5
-            },
-            "critical": {
-              "text": "Connection failed: {0} to {1}:{2}",
-              "value": 5.0
-            }
-          }
-        }
-      },
-      {
-        "name": "mapreduce_local_directory_space",
-        "label": "MapReduce Local Directory Space",
-        "description": "This host-level alert is triggered if the task tracker is reporting low disk space.",
-        "interval": 1,
-        "scope": "ANY",
-        "enabled": true,
-        "source": {
-          "type": "SCRIPT",
-          "path": "HDP/1.3.2/services/MAPREDUCE/package/alerts/alert_mapreduce_directory_space.py"
-        }
-      }
-    ],
-    "HISTORYSERVER": [
-      {
-        "name": "mapreduce_historyserver_webui",
-        "label": "History Server Web UI",
-        "description": "This host-level alert is triggered if the History Server Web UI is unreachable.",
-        "interval": 1,
-        "scope": "ANY",
-        "enabled": true,
-        "source": {
-          "type": "WEB",
-          "uri": {
-            "http": "{{mapred-site/mapreduce.history.server.http.address}}"
-          },
-          "reporting": {
-            "ok": {
-              "text": "HTTP {0} response in {2:.3f} seconds"
-            },
-            "warning":{
-              "text": "HTTP {0} response in {2:.3f} seconds"
-            },
-            "critical": {
-              "text": "Connection failed to {1}"
-            }
-          }
-        }
-      }
-    ]
-  }
-}
\ No newline at end of file
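
[Editor's note: the AGGREGATE source in mapreduce_tasktracker_process_percent grades the ratio of affected to total child alerts against the warning (0.1) and critical (0.3) values. One plausible reading of that reporting block, not Ambari's exact evaluator:

def aggregate_state(total, affected, warn=0.1, crit=0.3):
  ratio = float(affected) / total if total else 0.0
  if ratio >= crit:
    return 'CRITICAL'
  if ratio >= warn:
    return 'WARNING'
  return 'OK'

print aggregate_state(total=10, affected=1)  # WARNING: 10% of TaskTrackers down
print aggregate_state(total=10, affected=3)  # CRITICAL: 30% down
]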

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/capacity-scheduler.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/capacity-scheduler.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/capacity-scheduler.xml
deleted file mode 100644
index 0751f63..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/capacity-scheduler.xml
+++ /dev/null
@@ -1,195 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<!-- This is the configuration file for the resource manager in Hadoop. -->
-<!-- You can configure various scheduling parameters related to queues. -->
-<!-- The properties for a queue follow a naming convention, such as -->
-<!-- mapred.capacity-scheduler.queue.<queue-name>.property-name. -->
-
-<configuration supports_final="false" supports_adding_forbidden="true">
-
-  <property>
-    <name>mapred.capacity-scheduler.maximum-system-jobs</name>
-    <value>3000</value>
-    <description>Maximum number of jobs in the system which can be initialized
-     concurrently by the CapacityScheduler.
-    </description>    
-  </property>
-  
-  <property>
-    <name>mapred.capacity-scheduler.queue.default.capacity</name>
-    <value>100</value>
-    <description>Percentage of the number of slots in the cluster that are
-      to be available for jobs in this queue.
-    </description>    
-  </property>
-  
-  <property>
-    <name>mapred.capacity-scheduler.queue.default.maximum-capacity</name>
-    <value>-1</value>
-    <description>
-	maximum-capacity defines a limit beyond which a queue cannot use the capacity of the cluster.
-	This provides a means to limit how much excess capacity a queue can use. By default, there is no limit.
-	The maximum-capacity of a queue can only be greater than or equal to its minimum capacity.
-        Default value of -1 implies a queue can use complete capacity of the cluster.
-
-        This property can be used to keep long-running jobs from occupying more
-        than a certain percentage of the cluster, which, in the absence of
-        pre-emption, could affect the capacity guarantees of other queues.
-        
-        Note that maximum-capacity is a percentage, so the absolute maximum
-        capacity changes with the cluster's capacity: if a large number of nodes
-        or racks is added to the cluster, the maximum capacity in absolute terms
-        increases accordingly.
-    </description>    
-  </property>
-  
-  <property>
-    <name>mapred.capacity-scheduler.queue.default.supports-priority</name>
-    <value>false</value>
-    <description>If true, priorities of jobs will be taken into 
-      account in scheduling decisions.
-    </description>
-  </property>
-
-  <property>
-    <name>mapred.capacity-scheduler.queue.default.minimum-user-limit-percent</name>
-    <value>100</value>
-    <description> Each queue enforces a limit on the percentage of resources 
-    allocated to a user at any given time, if there is competition for them. 
-    This user limit can vary between a minimum and maximum value. The former
-    depends on the number of users who have submitted jobs, and the latter is
-    set to this property value. For example, suppose the value of this 
-    property is 25. If two users have submitted jobs to a queue, no single 
-    user can use more than 50% of the queue resources. If a third user submits
-    a job, no single user can use more than 33% of the queue resources. With 4 
-    or more users, no user can use more than 25% of the queue's resources. A 
-    value of 100 implies no user limits are imposed. 
-    </description>
-  </property>
-  
-  <property>
-    <name>mapred.capacity-scheduler.queue.default.user-limit-factor</name>
-    <value>1</value>
-    <description>The multiple of the queue capacity which can be configured to 
-    allow a single user to acquire more slots. 
-    </description>
-  </property>
-
-  <property>
-    <name>mapred.capacity-scheduler.queue.default.maximum-initialized-active-tasks</name>
-    <value>200000</value>
-    <description>The maximum number of tasks, across all jobs in the queue, 
-    which can be initialized concurrently. Once the queue's jobs exceed this 
-    limit they will be queued on disk.  
-    </description>
-  </property>
-
-  <property>
-    <name>mapred.capacity-scheduler.queue.default.maximum-initialized-active-tasks-per-user</name>
-    <value>100000</value>
-    <description>The maximum number of tasks per-user, across all the of the 
-    user's jobs in the queue, which can be initialized concurrently. Once the 
-    user's jobs exceed this limit they will be queued on disk.  
-    </description>
-  </property>
-
-  <property>
-    <name>mapred.capacity-scheduler.queue.default.init-accept-jobs-factor</name>
-    <value>10</value>
-    <description>The multiple of (maximum-system-jobs * queue-capacity) used to 
-    determine the number of jobs which are accepted by the scheduler.  
-    </description>
-  </property>
-
-  <!-- The default configuration settings for the capacity task scheduler -->
-  <!-- The default values would be applied to all the queues which don't have -->
-  <!-- the appropriate property for the particular queue -->
-  <property>
-    <name>mapred.capacity-scheduler.default-supports-priority</name>
-    <value>false</value>
-    <description>If true, priorities of jobs will be taken into 
-      account in scheduling decisions by default in a job queue.
-    </description>
-  </property>
-  
-  <property>
-    <name>mapred.capacity-scheduler.default-minimum-user-limit-percent</name>
-    <value>100</value>
-    <description>The default limit, as a percentage, on the resources available
-    to a single user in a job queue at any given point in time.
-    </description>
-  </property>
-
-
-  <property>
-    <name>mapred.capacity-scheduler.default-user-limit-factor</name>
-    <value>1</value>
-    <description>The default multiple of queue-capacity which is used to 
-    determine the number of slots a single user can consume concurrently.
-    </description>
-  </property>
-
-  <property>
-    <name>mapred.capacity-scheduler.default-maximum-active-tasks-per-queue</name>
-    <value>200000</value>
-    <description>The default maximum number of tasks, across all jobs in the 
-    queue, which can be initialized concurrently. Once the queue's jobs exceed 
-    this limit they will be queued on disk.  
-    </description>
-  </property>
-
-  <property>
-    <name>mapred.capacity-scheduler.default-maximum-active-tasks-per-user</name>
-    <value>100000</value>
-    <description>The default maximum number of tasks per-user, across all the of 
-    the user's jobs in the queue, which can be initialized concurrently. Once 
-    the user's jobs exceed this limit they will be queued on disk.  
-    </description>
-  </property>
-
-  <property>
-    <name>mapred.capacity-scheduler.default-init-accept-jobs-factor</name>
-    <value>10</value>
-    <description>The default multiple of (maximum-system-jobs * queue-capacity) 
-    used to determine the number of jobs which are accepted by the scheduler.  
-    </description>
-  </property>
-
-  <!-- Capacity scheduler Job Initialization configuration parameters -->
-  <property>
-    <name>mapred.capacity-scheduler.init-poll-interval</name>
-    <value>5000</value>
-    <description>The amount of time in milliseconds used to poll 
-    the job queues for jobs to initialize.
-    </description>
-  </property>
-  <property>
-    <name>mapred.capacity-scheduler.init-worker-threads</name>
-    <value>5</value>
-    <description>Number of worker threads used by the initialization
-    poller to initialize jobs in a set of queues. If this number equals
-    the number of job queues, a single thread initializes the jobs in
-    each queue. If it is smaller, each thread is assigned a set of
-    queues. If it is greater, the number of threads used equals the
-    number of job queues.
-    </description>
-  </property>
-
-</configuration>
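
[Editor's note: the minimum-user-limit-percent description works through an example: with a value of 25, two competing users each get at most 50% of the queue, three get 33%, four or more get 25%. That arithmetic made explicit (a sketch of the description's rule, not the scheduler's code):

def per_user_share(num_users, min_user_limit_percent):
  # Each of N competing users may use at most max(100/N, limit) percent.
  return max(100.0 / num_users, float(min_user_limit_percent))

for n in (1, 2, 3, 4, 5):
  print n, per_user_share(n, 25)  # 100.0, 50.0, 33.3..., 25.0, 25.0
]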

