ambari-commits mailing list archives

From dmitriu...@apache.org
Subject [21/21] ambari git commit: AMBARI-9068. Remove HDP 1.3 stack defn from Ambari. (dlysnichenko)
Date Mon, 12 Jan 2015 11:53:16 GMT
AMBARI-9068. Remove HDP 1.3 stack defn from Ambari. (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/23b7c110
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/23b7c110
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/23b7c110

Branch: refs/heads/trunk
Commit: 23b7c1108da35fde71db959b7c2655c2030131e5
Parents: f898c04
Author: Lisnichenko Dmitro <dlysnichenko@hortonworks.com>
Authored: Mon Jan 12 13:51:31 2015 +0200
Committer: Lisnichenko Dmitro <dlysnichenko@hortonworks.com>
Committed: Mon Jan 12 13:51:31 2015 +0200

----------------------------------------------------------------------
 ambari-agent/pom.xml                            |    4 +-
 .../HDP/1.3.2/configuration/cluster-env.xml     |   56 -
 .../1.3.2/hooks/after-INSTALL/scripts/hook.py   |   34 -
 .../1.3.2/hooks/after-INSTALL/scripts/params.py |   62 -
 .../scripts/shared_initialization.py            |   31 -
 .../hooks/before-ANY/files/changeToSecureUid.sh |   53 -
 .../HDP/1.3.2/hooks/before-ANY/scripts/hook.py  |   36 -
 .../1.3.2/hooks/before-ANY/scripts/params.py    |  117 --
 .../before-ANY/scripts/shared_initialization.py |  128 --
 .../1.3.2/hooks/before-INSTALL/scripts/hook.py  |   38 -
 .../hooks/before-INSTALL/scripts/params.py      |  132 --
 .../scripts/repo_initialization.py              |   55 -
 .../scripts/shared_initialization.py            |   68 -
 .../before-INSTALL/templates/repo_suse_rhel.j2  |    7 -
 .../before-INSTALL/templates/repo_ubuntu.j2     |    1 -
 .../1.3.2/hooks/before-RESTART/scripts/hook.py  |   29 -
 .../hooks/before-START/files/checkForFormat.sh  |   63 -
 .../before-START/files/task-log4j.properties    |  132 --
 .../1.3.2/hooks/before-START/scripts/hook.py    |   38 -
 .../1.3.2/hooks/before-START/scripts/params.py  |  143 --
 .../scripts/shared_initialization.py            |  189 ---
 .../templates/commons-logging.properties.j2     |   43 -
 .../templates/exclude_hosts_list.j2             |   21 -
 .../templates/hadoop-metrics2.properties.j2     |   63 -
 .../before-START/templates/health_check-v2.j2   |   81 -
 .../before-START/templates/health_check.j2      |  108 --
 .../templates/include_hosts_list.j2             |   21 -
 .../resources/stacks/HDP/1.3.2/metainfo.xml     |   22 -
 .../stacks/HDP/1.3.2/repos/repoinfo.xml         |   55 -
 .../stacks/HDP/1.3.2/role_command_order.json    |   88 --
 .../HDP/1.3.2/services/GANGLIA/alerts.json      |  137 --
 .../GANGLIA/configuration/ganglia-env.xml       |   72 -
 .../HDP/1.3.2/services/GANGLIA/metainfo.xml     |  107 --
 .../GANGLIA/package/files/checkGmetad.sh        |   37 -
 .../GANGLIA/package/files/checkGmond.sh         |   62 -
 .../GANGLIA/package/files/checkRrdcached.sh     |   34 -
 .../services/GANGLIA/package/files/gmetad.init  |   73 -
 .../services/GANGLIA/package/files/gmetadLib.sh |  204 ---
 .../services/GANGLIA/package/files/gmond.init   |   73 -
 .../services/GANGLIA/package/files/gmondLib.sh  |  538 -------
 .../GANGLIA/package/files/rrdcachedLib.sh       |   47 -
 .../GANGLIA/package/files/setupGanglia.sh       |  141 --
 .../GANGLIA/package/files/startGmetad.sh        |   68 -
 .../GANGLIA/package/files/startGmond.sh         |   85 --
 .../GANGLIA/package/files/startRrdcached.sh     |   79 -
 .../GANGLIA/package/files/stopGmetad.sh         |   43 -
 .../services/GANGLIA/package/files/stopGmond.sh |   54 -
 .../GANGLIA/package/files/stopRrdcached.sh      |   41 -
 .../GANGLIA/package/files/teardownGanglia.sh    |   28 -
 .../services/GANGLIA/package/scripts/ganglia.py |  106 --
 .../GANGLIA/package/scripts/ganglia_monitor.py  |  242 ---
 .../package/scripts/ganglia_monitor_service.py  |   27 -
 .../GANGLIA/package/scripts/ganglia_server.py   |  122 --
 .../package/scripts/ganglia_server_service.py   |   27 -
 .../services/GANGLIA/package/scripts/params.py  |  109 --
 .../GANGLIA/package/scripts/status_params.py    |   25 -
 .../GANGLIA/package/templates/ganglia.conf.j2   |   34 -
 .../package/templates/gangliaClusters.conf.j2   |   44 -
 .../GANGLIA/package/templates/gangliaEnv.sh.j2  |   44 -
 .../GANGLIA/package/templates/gangliaLib.sh.j2  |   84 --
 .../GANGLIA/package/templates/rrd.py.j2         |  215 ---
 .../stacks/HDP/1.3.2/services/HBASE/alerts.json |  124 --
 .../services/HBASE/configuration/hbase-env.xml  |  134 --
 .../HBASE/configuration/hbase-log4j.xml         |  129 --
 .../HBASE/configuration/hbase-policy.xml        |   53 -
 .../services/HBASE/configuration/hbase-site.xml |  331 -----
 .../HDP/1.3.2/services/HBASE/metainfo.xml       |  139 --
 .../HBASE/package/files/draining_servers.rb     |  164 ---
 .../HBASE/package/files/hbaseSmokeVerify.sh     |   32 -
 .../services/HBASE/package/scripts/__init__.py  |   19 -
 .../services/HBASE/package/scripts/functions.py |   40 -
 .../services/HBASE/package/scripts/hbase.py     |  138 --
 .../HBASE/package/scripts/hbase_client.py       |   43 -
 .../HBASE/package/scripts/hbase_decommission.py |   74 -
 .../HBASE/package/scripts/hbase_master.py       |   70 -
 .../HBASE/package/scripts/hbase_regionserver.py |   62 -
 .../HBASE/package/scripts/hbase_service.py      |   46 -
 .../services/HBASE/package/scripts/params.py    |  116 --
 .../HBASE/package/scripts/service_check.py      |   80 -
 .../HBASE/package/scripts/status_params.py      |   26 -
 .../hadoop-metrics.properties-GANGLIA-MASTER.j2 |   68 -
 .../hadoop-metrics.properties-GANGLIA-RS.j2     |   68 -
 .../templates/hadoop-metrics.properties.j2      |   69 -
 .../HBASE/package/templates/hbase-smoke.sh.j2   |   44 -
 .../package/templates/hbase_client_jaas.conf.j2 |   24 -
 .../templates/hbase_grant_permissions.j2        |   40 -
 .../package/templates/hbase_master_jaas.conf.j2 |   26 -
 .../templates/hbase_regionserver_jaas.conf.j2   |   26 -
 .../HBASE/package/templates/regionservers.j2    |   21 -
 .../stacks/HDP/1.3.2/services/HDFS/alerts.json  |  476 ------
 .../services/HDFS/configuration/core-site.xml   |  254 ----
 .../services/HDFS/configuration/hadoop-env.xml  |  201 ---
 .../HDFS/configuration/hadoop-policy.xml        |  134 --
 .../services/HDFS/configuration/hdfs-log4j.xml  |  197 ---
 .../services/HDFS/configuration/hdfs-site.xml   |  369 -----
 .../stacks/HDP/1.3.2/services/HDFS/metainfo.xml |  163 ---
 .../HDFS/package/files/checkForFormat.sh        |   63 -
 .../services/HDFS/package/files/checkWebUI.py   |   53 -
 .../services/HDFS/package/scripts/datanode.py   |   60 -
 .../1.3.2/services/HDFS/package/scripts/hdfs.py |   74 -
 .../HDFS/package/scripts/hdfs_client.py         |   58 -
 .../HDFS/package/scripts/hdfs_datanode.py       |   63 -
 .../HDFS/package/scripts/hdfs_namenode.py       |  129 --
 .../HDFS/package/scripts/hdfs_snamenode.py      |   45 -
 .../services/HDFS/package/scripts/namenode.py   |   68 -
 .../services/HDFS/package/scripts/params.py     |  200 ---
 .../HDFS/package/scripts/service_check.py       |  104 --
 .../services/HDFS/package/scripts/snamenode.py  |   65 -
 .../HDFS/package/scripts/status_params.py       |   31 -
 .../services/HDFS/package/scripts/utils.py      |   71 -
 .../package/templates/exclude_hosts_list.j2     |   22 -
 .../HDFS/package/templates/hdfs.conf.j2         |   35 -
 .../services/HDFS/package/templates/slaves.j2   |   21 -
 .../stacks/HDP/1.3.2/services/HIVE/alerts.json  |   60 -
 .../services/HIVE/configuration/hcat-env.xml    |   57 -
 .../services/HIVE/configuration/hive-env.xml    |  142 --
 .../HIVE/configuration/hive-exec-log4j.xml      |  104 --
 .../services/HIVE/configuration/hive-log4j.xml  |  116 --
 .../services/HIVE/configuration/hive-site.xml   |  277 ----
 .../services/HIVE/configuration/webhcat-env.xml |   54 -
 .../HIVE/configuration/webhcat-site.xml         |  156 --
 .../HIVE/etc/hive-schema-0.10.0.mysql.sql       |  748 ----------
 .../HIVE/etc/hive-schema-0.10.0.oracle.sql      |  698 ---------
 .../HIVE/etc/hive-schema-0.10.0.postgres.sql    | 1379 ------------------
 .../stacks/HDP/1.3.2/services/HIVE/metainfo.xml |  245 ----
 .../package/alerts/alert_hive_thrift_port.py    |  124 --
 .../HIVE/package/alerts/alert_webhcat_server.py |  111 --
 .../services/HIVE/package/files/addMysqlUser.sh |   41 -
 .../services/HIVE/package/files/hcatSmoke.sh    |   35 -
 .../services/HIVE/package/files/hiveSmoke.sh    |   23 -
 .../services/HIVE/package/files/hiveserver2.sql |   23 -
 .../HIVE/package/files/hiveserver2Smoke.sh      |   31 -
 .../services/HIVE/package/files/pigSmoke.sh     |   18 -
 .../HIVE/package/files/startHiveserver2.sh      |   22 -
 .../HIVE/package/files/startMetastore.sh        |   22 -
 .../HIVE/package/files/templetonSmoke.sh        |   96 --
 .../services/HIVE/package/scripts/__init__.py   |   19 -
 .../1.3.2/services/HIVE/package/scripts/hcat.py |   50 -
 .../HIVE/package/scripts/hcat_client.py         |   41 -
 .../HIVE/package/scripts/hcat_service_check.py  |   79 -
 .../1.3.2/services/HIVE/package/scripts/hive.py |  193 ---
 .../HIVE/package/scripts/hive_client.py         |   41 -
 .../HIVE/package/scripts/hive_metastore.py      |   63 -
 .../HIVE/package/scripts/hive_server.py         |   63 -
 .../HIVE/package/scripts/hive_service.py        |   98 --
 .../HIVE/package/scripts/mysql_server.py        |   72 -
 .../HIVE/package/scripts/mysql_service.py       |   41 -
 .../services/HIVE/package/scripts/params.py     |  220 ---
 .../HIVE/package/scripts/service_check.py       |   53 -
 .../HIVE/package/scripts/status_params.py       |   38 -
 .../services/HIVE/package/scripts/webhcat.py    |  107 --
 .../HIVE/package/scripts/webhcat_server.py      |   54 -
 .../HIVE/package/scripts/webhcat_service.py     |   42 -
 .../package/scripts/webhcat_service_check.py    |   42 -
 .../HDP/1.3.2/services/MAPREDUCE/alerts.json    |  221 ---
 .../configuration/capacity-scheduler.xml        |  195 ---
 .../MAPREDUCE/configuration/mapred-env.xml      |   81 -
 .../configuration/mapred-queue-acls.xml         |   52 -
 .../MAPREDUCE/configuration/mapred-site.xml     |  554 -------
 .../MAPREDUCE/configuration/mapreduce-log4j.xml |   54 -
 .../HDP/1.3.2/services/MAPREDUCE/metainfo.xml   |  140 --
 .../alerts/alert_mapreduce_directory_space.py   |   93 --
 .../MAPREDUCE/package/scripts/client.py         |   49 -
 .../MAPREDUCE/package/scripts/historyserver.py  |   58 -
 .../MAPREDUCE/package/scripts/jobtracker.py     |   87 --
 .../MAPREDUCE/package/scripts/mapreduce.py      |  178 ---
 .../MAPREDUCE/package/scripts/params.py         |  100 --
 .../MAPREDUCE/package/scripts/service.py        |   56 -
 .../MAPREDUCE/package/scripts/service_check.py  |   87 --
 .../MAPREDUCE/package/scripts/status_params.py  |   32 -
 .../MAPREDUCE/package/scripts/tasktracker.py    |  101 --
 .../package/templates/exclude_hosts_list.j2     |   22 -
 .../package/templates/taskcontroller.cfg.j2     |   38 -
 .../stacks/HDP/1.3.2/services/OOZIE/alerts.json |   42 -
 .../services/OOZIE/configuration/oozie-env.xml  |  121 --
 .../OOZIE/configuration/oozie-log4j.xml         |   97 --
 .../services/OOZIE/configuration/oozie-site.xml |  236 ---
 .../HDP/1.3.2/services/OOZIE/metainfo.xml       |  135 --
 .../package/alerts/alert_check_oozie_server.py  |   81 -
 .../services/OOZIE/package/files/oozieSmoke.sh  |   93 --
 .../OOZIE/package/files/wrap_ooziedb.sh         |   31 -
 .../services/OOZIE/package/scripts/oozie.py     |  167 ---
 .../OOZIE/package/scripts/oozie_client.py       |   44 -
 .../OOZIE/package/scripts/oozie_server.py       |   56 -
 .../OOZIE/package/scripts/oozie_service.py      |   71 -
 .../services/OOZIE/package/scripts/params.py    |  132 --
 .../OOZIE/package/scripts/service_check.py      |   57 -
 .../OOZIE/package/scripts/status_params.py      |   26 -
 .../services/PIG/configuration/pig-env.xml      |   34 -
 .../services/PIG/configuration/pig-log4j.xml    |   62 -
 .../PIG/configuration/pig-properties.xml        |   83 --
 .../stacks/HDP/1.3.2/services/PIG/metainfo.xml  |   85 --
 .../services/PIG/package/files/pigSmoke.sh      |   18 -
 .../services/PIG/package/scripts/params.py      |   48 -
 .../1.3.2/services/PIG/package/scripts/pig.py   |   57 -
 .../services/PIG/package/scripts/pig_client.py  |   41 -
 .../PIG/package/scripts/service_check.py        |   67 -
 .../services/SQOOP/configuration/sqoop-env.xml  |   55 -
 .../HDP/1.3.2/services/SQOOP/metainfo.xml       |   92 --
 .../services/SQOOP/package/scripts/__init__.py  |   18 -
 .../services/SQOOP/package/scripts/params.py    |   37 -
 .../SQOOP/package/scripts/service_check.py      |   38 -
 .../services/SQOOP/package/scripts/sqoop.py     |   55 -
 .../SQOOP/package/scripts/sqoop_client.py       |   40 -
 .../HDP/1.3.2/services/ZOOKEEPER/alerts.json    |   58 -
 .../ZOOKEEPER/configuration/zoo.cfg.xml         |   62 -
 .../ZOOKEEPER/configuration/zookeeper-env.xml   |   58 -
 .../ZOOKEEPER/configuration/zookeeper-log4j.xml |  102 --
 .../HDP/1.3.2/services/ZOOKEEPER/metainfo.xml   |   89 --
 .../services/ZOOKEEPER/package/files/zkEnv.sh   |   96 --
 .../ZOOKEEPER/package/files/zkServer.sh         |  120 --
 .../ZOOKEEPER/package/files/zkService.sh        |   26 -
 .../services/ZOOKEEPER/package/files/zkSmoke.sh |   78 -
 .../ZOOKEEPER/package/scripts/__init__.py       |   20 -
 .../ZOOKEEPER/package/scripts/params.py         |   73 -
 .../ZOOKEEPER/package/scripts/service_check.py  |   46 -
 .../ZOOKEEPER/package/scripts/status_params.py  |   26 -
 .../ZOOKEEPER/package/scripts/zookeeper.py      |  110 --
 .../package/scripts/zookeeper_client.py         |   42 -
 .../package/scripts/zookeeper_server.py         |   54 -
 .../package/scripts/zookeeper_service.py        |   42 -
 .../package/templates/configuration.xsl.j2      |   55 -
 .../ZOOKEEPER/package/templates/zoo.cfg.j2      |   53 -
 .../templates/zookeeper_client_jaas.conf.j2     |   24 -
 .../package/templates/zookeeper_jaas.conf.j2    |   27 -
 .../stacks/HDP/1.3.2/services/stack_advisor.py  |  317 ----
 .../resources/stacks/HDP/1.3.3/metainfo.xml     |   23 -
 .../stacks/HDP/1.3.3/repos/repoinfo.xml         |   55 -
 .../stacks/HDP/1.3.3/role_command_order.json    |   90 --
 .../stacks/HDP/1.3.3/services/stack_advisor.py  |   21 -
 .../main/resources/stacks/HDP/1.3/metainfo.xml  |   23 -
 .../resources/stacks/HDP/1.3/repos/repoinfo.xml |   56 -
 .../stacks/HDP/1.3/role_command_order.json      |   88 --
 .../stacks/HDP/1.3/services/HBASE/metainfo.xml  |   29 -
 .../stacks/HDP/1.3/services/HDFS/metainfo.xml   |   27 -
 .../stacks/HDP/1.3/services/HIVE/metainfo.xml   |   27 -
 .../HDP/1.3/services/MAPREDUCE/metainfo.xml     |   28 -
 .../stacks/HDP/1.3/services/OOZIE/metainfo.xml  |   28 -
 .../stacks/HDP/1.3/services/PIG/metainfo.xml    |   27 -
 .../stacks/HDP/1.3/services/SQOOP/metainfo.xml  |   29 -
 .../HDP/1.3/services/ZOOKEEPER/metainfo.xml     |   27 -
 .../stacks/HDP/1.3/services/stack_advisor.py    |   21 -
 .../1.3.2/GANGLIA/test_ganglia_monitor.py       |  300 ----
 .../stacks/1.3.2/GANGLIA/test_ganglia_server.py |  197 ---
 .../stacks/1.3.2/HBASE/test_hbase_client.py     |  156 --
 .../stacks/1.3.2/HBASE/test_hbase_master.py     |  352 -----
 .../1.3.2/HBASE/test_hbase_regionserver.py      |  283 ----
 .../1.3.2/HBASE/test_hbase_service_check.py     |   92 --
 .../python/stacks/1.3.2/HDFS/test_datanode.py   |  227 ---
 .../stacks/1.3.2/HDFS/test_hdfs_client.py       |   64 -
 .../python/stacks/1.3.2/HDFS/test_namenode.py   |  337 -----
 .../stacks/1.3.2/HDFS/test_service_check.py     |   76 -
 .../python/stacks/1.3.2/HDFS/test_snamenode.py  |  226 ---
 .../stacks/1.3.2/HIVE/test_hcat_client.py       |   86 --
 .../stacks/1.3.2/HIVE/test_hive_client.py       |  139 --
 .../stacks/1.3.2/HIVE/test_hive_metastore.py    |  296 ----
 .../stacks/1.3.2/HIVE/test_hive_server.py       |  394 -----
 .../1.3.2/HIVE/test_hive_service_check.py       |  128 --
 .../stacks/1.3.2/HIVE/test_mysql_server.py      |  138 --
 .../stacks/1.3.2/HIVE/test_webhcat_server.py    |  262 ----
 .../1.3.2/MAPREDUCE/test_mapreduce_client.py    |  208 ---
 .../MAPREDUCE/test_mapreduce_historyserver.py   |  357 -----
 .../MAPREDUCE/test_mapreduce_jobtracker.py      |  421 ------
 .../MAPREDUCE/test_mapreduce_service_check.py   |   78 -
 .../MAPREDUCE/test_mapreduce_tasktracker.py     |  240 ---
 .../stacks/1.3.2/OOZIE/test_oozie_client.py     |  124 --
 .../stacks/1.3.2/OOZIE/test_oozie_server.py     |  511 -------
 .../stacks/1.3.2/OOZIE/test_service_check.py    |   56 -
 .../python/stacks/1.3.2/PIG/test_pig_client.py  |   81 -
 .../stacks/1.3.2/PIG/test_pig_service_check.py  |   93 --
 .../stacks/1.3.2/SQOOP/test_service_check.py    |   50 -
 .../python/stacks/1.3.2/SQOOP/test_sqoop.py     |   52 -
 .../1.3.2/ZOOKEEPER/test_zookeeper_client.py    |  139 --
 .../1.3.2/ZOOKEEPER/test_zookeeper_server.py    |  215 ---
 .../ZOOKEEPER/test_zookeeper_service_check.py   |   61 -
 .../1.3.2/configs/default.hbasedecom.json       |  542 -------
 .../python/stacks/1.3.2/configs/default.json    |  609 --------
 .../1.3.2/configs/default.non_gmetad_host.json  |  590 --------
 .../stacks/1.3.2/configs/default_client.json    |  593 --------
 .../default_update_exclude_file_only.json       |  608 --------
 .../python/stacks/1.3.2/configs/secured.json    |  778 ----------
 .../stacks/1.3.2/configs/secured_client.json    |  771 ----------
 .../1.3.2/configs/secured_no_jce_name.json      |  639 --------
 .../hooks/after-INSTALL/test_after_install.py   |   40 -
 .../1.3.2/hooks/before-ANY/test_before_any.py   |  157 --
 .../hooks/before-INSTALL/test_before_install.py |   55 -
 .../hooks/before-START/test_before_start.py     |  217 ---
 287 files changed, 2 insertions(+), 34610 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index 939c366..be12dc3 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -720,7 +720,7 @@
       <executable.shell>cmd</executable.shell>
       <fileextension.shell>cmd</fileextension.shell>
       <fileextension.dot.shell-default>.cmd</fileextension.dot.shell-default>
-      <path.python.1>${project.basedir}\..\ambari-common\src\main\python;${project.basedir}\..\ambari-agent\src\main\python;${project.basedir}\..\ambari-common\src\main\python\ambari_jinja2;${project.basedir}\..\ambari-common\src\main\python\ambari_commons;${project.basedir}\..\ambari-common\src\test\python;${project.basedir}\src\main\python;${project.basedir}\src\main\python\ambari_agent;${project.basedir}\src\main\python\resource_management;${project.basedir}\src\test\python;${project.basedir}\src\test\python\ambari_agent;${project.basedir}\src\test\python\resource_management;${project.basedir}\..\ambari-server\src\test\python;${project.basedir}\..\ambari-server\src\main\resources\stacks\HDP\2.0.6\services\HDFS\package\files;${project.basedir}\..\ambari-server\src\main\resources\stacks\HDP\1.3.2\services\HDFS\package\files</path.python.1>
+      <path.python.1>${project.basedir}\..\ambari-common\src\main\python;${project.basedir}\..\ambari-agent\src\main\python;${project.basedir}\..\ambari-common\src\main\python\ambari_jinja2;${project.basedir}\..\ambari-common\src\main\python\ambari_commons;${project.basedir}\..\ambari-common\src\test\python;${project.basedir}\src\main\python;${project.basedir}\src\main\python\ambari_agent;${project.basedir}\src\main\python\resource_management;${project.basedir}\src\test\python;${project.basedir}\src\test\python\ambari_agent;${project.basedir}\src\test\python\resource_management;${project.basedir}\..\ambari-server\src\test\python;${project.basedir}\..\ambari-server\src\main\resources\common-services\HDFS\2.1.0.2.0\package\files</path.python.1>
      </properties>
     </profile>
     <profile>
@@ -739,7 +739,7 @@
       <executable.shell>sh</executable.shell>
       <fileextension.shell>sh</fileextension.shell>
       <fileextension.dot.shell-default></fileextension.dot.shell-default>
-      <path.python.1>${project.basedir}/../ambari-common/src/main/python:${project.basedir}/../ambari-agent/src/main/python:${project.basedir}/../ambari-common/src/main/python/ambari_jinja2:${project.basedir}/../ambari-common/src/main/python/ambari_commons:${project.basedir}/../ambari-common/src/test/python:${project.basedir}/src/main/python:${project.basedir}/src/main/python/ambari_agent:${project.basedir}/src/main/python/resource_management:${project.basedir}/src/test/python:${project.basedir}/src/test/python/ambari_agent:${project.basedir}/src/test/python/resource_management:${project.basedir}/../ambari-server/src/test/python:${project.basedir}/../ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files:${project.basedir}/../ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/files</path.python.1>
+      <path.python.1>${project.basedir}/../ambari-common/src/main/python:${project.basedir}/../ambari-agent/src/main/python:${project.basedir}/../ambari-common/src/main/python/ambari_jinja2:${project.basedir}/../ambari-common/src/main/python/ambari_commons:${project.basedir}/../ambari-common/src/test/python:${project.basedir}/src/main/python:${project.basedir}/src/main/python/ambari_agent:${project.basedir}/src/main/python/resource_management:${project.basedir}/src/test/python:${project.basedir}/src/test/python/ambari_agent:${project.basedir}/src/test/python/resource_management:${project.basedir}/../ambari-server/src/test/python:${project.basedir}/../ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files</path.python.1>
      </properties>
     </profile>
     <profile>

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/configuration/cluster-env.xml
deleted file mode 100644
index d41ff98..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/configuration/cluster-env.xml
+++ /dev/null
@@ -1,56 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-    <property>
-        <name>security_enabled</name>
-        <value>false</value>
-        <description>Hadoop Security</description>
-    </property>
-    <property>
-        <name>kerberos_domain</name>
-        <value>EXAMPLE.COM</value>
-        <description>Kerberos realm.</description>
-    </property>
-    <property>
-        <name>ignore_groupsusers_create</name>
-        <value>false</value>
-        <description>Whether to ignore failures on users and group creation</description>
-    </property>
-    <property>
-        <name>smokeuser</name>
-        <value>ambari-qa</value>
-        <property-type>USER</property-type>
-        <description>User executing service checks</description>
-    </property>
-    <property>
-        <name>smokeuser_keytab</name>
-        <value>/etc/security/keytabs/smokeuser.headless.keytab</value>
-        <description>Path to smoke test user keytab file</description>
-    </property>
-    <property>
-        <name>user_group</name>
-        <value>hadoop</value>
-        <property-type>GROUP</property-type>
-        <description>Hadoop user group.</description>
-    </property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
deleted file mode 100644
index c86e1c6..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from shared_initialization import *
-
-#Hook for hosts with only client without other components
-class AfterInstallHook(Hook):
-
-  def hook(self, env):
-    import params
-
-    env.set_params(params)
-    setup_config()
-
-if __name__ == "__main__":
-  AfterInstallHook().execute()
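
The deleted hook files all follow the same entry-point shape: subclass a framework base class, implement hook(env), and let execute() drive it. A stdlib-only sketch of that shape, with Hook stubbed as a hypothetical stand-in (the real resource_management.Hook parses the agent's command JSON and builds the environment):

    class Hook(object):
        """Hypothetical stand-in for resource_management's Hook base class."""
        def execute(self):
            # The real framework builds env from the agent's command JSON;
            # here we pass a bare dict just to drive the subclass.
            self.hook(env={})

    class AfterInstallHook(Hook):
        def hook(self, env):
            # The deleted hook imported a params module, bound it to env,
            # then declared resources (XmlConfig and friends) against it.
            print("after-INSTALL hook body runs once per host")

    if __name__ == "__main__":
        AfterInstallHook().execute()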

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
deleted file mode 100644
index 24d432d..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from resource_management.core.system import System
-import os
-
-config = Script.get_config()
-
-#security params
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-#java params
-java_home = config['hostLevelParams']['java_home']
-#hadoop params
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
-hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
-hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
-hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
-
-#hadoop-env.sh
-if System.get_instance().os_family == "suse":
-  jsvc_path = "/usr/lib/bigtop-utils"
-else:
-  jsvc_path = "/usr/libexec/bigtop-utils"
-hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
-namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
-namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
-namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
-namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
-namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
-
-dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
-
-mapred_pid_dir_prefix = default("/configurations/hadoop-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-
-
-#users and groups
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
-
-namenode_host = default("/clusterHostInfo/namenode_host", [])
-has_namenode = not len(namenode_host) == 0
\ No newline at end of file
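
params.py modules like this one are flat namespaces of values pulled out of one nested command dict: required keys are indexed directly, optional ones go through default("/a/b/c", fallback). A stdlib-only illustration of that lookup convention (this default() is a hypothetical stand-in taking the config explicitly, not resource_management's implementation):

    def default(path, fallback, config):
        """Walk a '/'-separated path through nested dicts, else fallback."""
        node = config
        for key in path.strip("/").split("/"):
            if not isinstance(node, dict) or key not in node:
                return fallback
            node = node[key]
        return node

    config = {"configurations": {"hadoop-env": {"hdfs_user": "hdfs"}}}
    assert default("/configurations/hadoop-env/hdfs_user", "x", config) == "hdfs"
    assert default("/clusterHostInfo/namenode_host", [], config) == []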

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
deleted file mode 100644
index ff2e746..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import os
-from resource_management import *
-
-def setup_config():
-  import params
-  if params.has_namenode:
-    XmlConfig("core-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['core-site'],
-              configuration_attributes=params.config['configuration_attributes']['core-site'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
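
The setup_config() above declares a single XmlConfig resource; what that resource ultimately writes is a Hadoop-style <configuration> file rendered from the core-site dict. A minimal sketch of that rendering, omitting the ownership and configuration_attributes handling the real resource also performs:

    import xml.etree.ElementTree as ET

    def render_hadoop_xml(properties):
        """Render {name: value} pairs as Hadoop's <configuration> XML."""
        root = ET.Element("configuration")
        for name, value in sorted(properties.items()):
            prop = ET.SubElement(root, "property")
            ET.SubElement(prop, "name").text = name
            ET.SubElement(prop, "value").text = str(value)
        return ET.tostring(root, encoding="unicode")

    print(render_hadoop_xml({"fs.default.name": "hdfs://namenode:8020"}))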

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/files/changeToSecureUid.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/files/changeToSecureUid.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/files/changeToSecureUid.sh
deleted file mode 100644
index 8d25120..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/files/changeToSecureUid.sh
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-username=$1
-directories=$2
-
-function find_available_uid() {
- for ((i=1001; i<=2000; i++))
- do
-   grep -q $i /etc/passwd
-   if [ "$?" -ne 0 ]
-   then
-    newUid=$i
-    break
-   fi
- done
-}
-
-find_available_uid
-
-if [ $newUid -eq 0 ]
-then
-  echo "Failed to find Uid between 1000 and 2000"
-  exit 1
-fi
-
-set -e
-
-dir_array=($(echo $directories | sed 's/,/\n/g'))
-old_uid=$(id -u $username)
-sudo_prefix="sudo -H -E"
-echo "Changing uid of $username from $old_uid to $newUid"
-echo "Changing directory permisions for ${dir_array[@]}"
-$sudo_prefix usermod -u $newUid $username && for dir in ${dir_array[@]} ; do ls $dir 2> /dev/null && echo "Changing permission for $dir" && $sudo_prefix chown -Rh $newUid $dir ; done
-exit 0
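
The core of the removed script is the free-UID scan. A Python rendering of the same idea, for clarity: unlike the shell version's bare grep over /etc/passwd (which can match a UID appearing anywhere on a line, e.g. inside a home path), this parses the third field explicitly. Illustration only, under the same 1001-2000 range assumption:

    def find_available_uid(passwd_path="/etc/passwd", lo=1001, hi=2000):
        used = set()
        with open(passwd_path) as f:
            for line in f:
                fields = line.split(":")
                if len(fields) > 2 and fields[2].isdigit():
                    used.add(int(fields[2]))     # third field is the UID
        for uid in range(lo, hi + 1):
            if uid not in used:
                return uid
        raise RuntimeError("no free UID between %d and %d" % (lo, hi))

    if __name__ == "__main__":
        print(find_available_uid())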

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/hook.py
deleted file mode 100644
index 724f374..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/hook.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from shared_initialization import *
-
-class BeforeAnyHook(Hook):
-
-  def hook(self, env):
-    import params
-    env.set_params(params)
-    
-    setup_jce()
-    setup_users()
-    setup_hadoop_env()
-
-
-if __name__ == "__main__":
-  BeforeAnyHook().execute()
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
deleted file mode 100644
index c4ba90c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import collections
-import json
-
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
-artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
-jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
-jce_location = config['hostLevelParams']['jdk_location']
-jdk_name = default("/hostLevelParams/jdk_name", None)
-java_home = config['hostLevelParams']['java_home']
-
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-versioned_hdp_root = '/usr/hdp/current'
-
-#hadoop params
-hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
-hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
-hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
-
-stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and System.get_instance().os_family != "suse":
-  # deprecated rhel jsvc_path
-  jsvc_path = "/usr/libexec/bigtop-utils"
-else:
-  jsvc_path = "/usr/lib/bigtop-utils"
-
-hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
-namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
-namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
-namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
-namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
-namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
-
-jtnode_opt_newsize = default("/configurations/mapred-env/jtnode_opt_newsize","200m")
-jtnode_opt_maxnewsize = default("/configurations/mapred-env/jtnode_opt_maxnewsize","200m")
-jtnode_heapsize =  default("/configurations/mapred-env/jtnode_heapsize","1024m")
-ttnode_heapsize = default("/configurations/mapred-env/ttnode_heapsize","1024m")
-
-dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
-mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
-hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
-
-#users and groups
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
-
-ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
-namenode_host = default("/clusterHostInfo/namenode_host", [])
-hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
-
-has_namenode = not len(namenode_host) == 0
-has_ganglia_server = not len(ganglia_server_hosts) == 0
-has_tez = 'tez-site' in config['configurations']
-has_hbase_masters = not len(hbase_master_hosts) == 0
-
-hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
-
-#users and groups
-hbase_user = config['configurations']['hbase-env']['hbase_user']
-smoke_user =  config['configurations']['cluster-env']['smokeuser']
-gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
-gmond_user = config['configurations']['ganglia-env']["gmond_user"]
-
-proxyuser_group =  default("/configurations/hadoop-env/proxyuser_group","users")
-
-ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
-
-smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
-if has_hbase_masters:
-  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
-#repo params
-repo_info = config['hostLevelParams']['repo_info']
-service_repo_info = default("/hostLevelParams/service_repo_info",None)
-
-user_to_groups_dict = collections.defaultdict(lambda:[user_group])
-user_to_groups_dict[smoke_user] = [proxyuser_group]
-if has_ganglia_server:
-  user_to_groups_dict[gmond_user] = [gmond_user]
-  user_to_groups_dict[gmetad_user] = [gmetad_user]
-if has_tez:
-  user_to_groups_dict[tez_user] = [proxyuser_group]
-
-user_to_gid_dict = collections.defaultdict(lambda:user_group)
-
-user_list = json.loads(config['hostLevelParams']['user_list'])
-group_list = json.loads(config['hostLevelParams']['group_list'])
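
The group assignments at the end of this params module lean on collections.defaultdict: any user without an explicit entry falls back to the cluster user_group, and only the smoke/ganglia/tez users are special-cased. A standalone sketch of that pattern (names illustrative):

    import collections

    user_group, proxyuser_group = "hadoop", "users"
    user_to_groups = collections.defaultdict(lambda: [user_group])
    user_to_groups["ambari-qa"] = [proxyuser_group]   # smoke user override

    assert user_to_groups["hdfs"] == ["hadoop"]       # fallback entry
    assert user_to_groups["ambari-qa"] == ["users"]   # explicit entry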

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py
deleted file mode 100644
index dc0392c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py
+++ /dev/null
@@ -1,128 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-
-from resource_management import *
-
-
-
-def setup_jce():
-  import params
-  
-  if not params.jdk_name:
-    return
-  
-  environment = {
-    "no_proxy": format("{ambari_server_hostname}")
-  }
-  
-  if params.jce_policy_zip is not None:
-    jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-    download_jce = format("mkdir -p {artifact_dir}; \
-    curl -kf -x \"\" --retry 10 \
-    {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
-    Execute( download_jce,
-             path = ["/bin","/usr/bin/"],
-             not_if =format("test -e {jce_curl_target}"),
-             ignore_failures = True,
-             environment = environment
-    )
-  elif params.security_enabled:
-    # Something weird is happening
-    raise Fail("Security is enabled, but JCE policy zip is not specified.")
-  
-  if params.security_enabled:
-    security_dir = format("{java_home}/jre/lib/security")
-    
-    File([format("{security_dir}/US_export_policy.jar"), format("{security_dir}/local_policy.jar")],
-         action = "delete",
-    )
-    
-    extract_cmd = ("unzip", "-o", "-j", "-q", jce_curl_target, "-d", security_dir) 
-    Execute(extract_cmd,
-            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-            path = ['/bin/','/usr/bin'],
-            sudo = True
-    )
-    
-
-def setup_users():
-  """
-  Creates users before cluster installation
-  """
-  import params
-  
-  for group in params.group_list:
-    Group(group,
-        ignore_failures = params.ignore_groupsusers_create
-    )
-    
-  for user in params.user_list:
-    User(user,
-        gid = params.user_to_gid_dict[user],
-        groups = params.user_to_groups_dict[user],
-        ignore_failures = params.ignore_groupsusers_create       
-    )
-           
-  set_uid(params.smoke_user, params.smoke_user_dirs)
-
-  if params.has_hbase_masters:
-    set_uid(params.hbase_user, params.hbase_user_dirs)
-    
-def set_uid(user, user_dirs):
-  """
-  user_dirs - comma separated directories
-  """
-  import params
-
-  File(format("{tmp_dir}/changeUid.sh"),
-       content=StaticFile("changeToSecureUid.sh"),
-       mode=0555)
-  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs}"),
-          not_if = format("test $(id -u {user}) -gt 1000"))
-    
-def setup_hadoop_env():
-  import params
-  if params.has_namenode:
-    if params.security_enabled:
-      tc_owner = "root"
-    else:
-      tc_owner = params.hdfs_user
-    Directory(params.hadoop_conf_empty_dir,
-              recursive=True,
-              owner='root',
-              group='root'
-    )
-    Link(params.hadoop_conf_dir,
-         to=params.hadoop_conf_empty_dir,
-         not_if=format("ls {hadoop_conf_dir}")
-    )
-    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-         owner=tc_owner,
-         content=InlineTemplate(params.hadoop_env_sh_template)
-    )
-    XmlConfig("core-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['core-site'],
-              configuration_attributes=params.config['configuration_attributes']['core-site'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
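
setup_jce() above is an idempotent fetch-then-extract: skip the curl if the archive is already present, delete the old policy jars, then unzip into the JRE security directory. A condensed stdlib sketch of that control flow (URL and paths are placeholders, and extractall() does not do the path-flattening that unzip -j does):

    import os
    import urllib.request
    import zipfile

    def fetch_and_extract(url, target_zip, dest_dir):
        if not os.path.exists(target_zip):      # mirrors not_if="test -e ..."
            os.makedirs(os.path.dirname(target_zip), exist_ok=True)
            urllib.request.urlretrieve(url, target_zip)
        with zipfile.ZipFile(target_zip) as zf:
            zf.extractall(dest_dir)             # mirrors the unzip step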

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
deleted file mode 100644
index 7f1bccd..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from shared_initialization import *
-from repo_initialization import install_repos
-
-class BeforeInstallHook(Hook):
-
-  def hook(self, env):
-    import params
-    
-    self.run_custom_hook('before-ANY')
-    env.set_params(params)
-    
-    install_repos()
-    setup_java()
-    install_packages()
-
-if __name__ == "__main__":
-  BeforeInstallHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
deleted file mode 100644
index a4d9578..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from resource_management.core.system import System
-import os
-import json
-import collections
-
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-#java params
-artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
-jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
-jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
-jce_location = config['hostLevelParams']['jdk_location']
-jdk_location = config['hostLevelParams']['jdk_location']
-java_home = config['hostLevelParams']['java_home']
-if System.get_instance().os_family == "suse":
-  jsvc_path = "/usr/lib/bigtop-utils"
-else:
-  jsvc_path = "/usr/libexec/bigtop-utils"
-#security params
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-#hadoop params
-hadoop_conf_dir = "/etc/hadoop/conf"
-
-#hadoop-env.sh
-
-java_home = config['hostLevelParams']['java_home']
-if System.get_instance().os_family == "suse":
-  jsvc_path = "/usr/lib/bigtop-utils"
-else:
-  jsvc_path = "/usr/libexec/bigtop-utils"
-hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
-namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
-namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
-namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
-namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
-namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
-
-dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
-mapred_pid_dir_prefix = default("/configurations/hadoop-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-mapred_log_dir_prefix = "/var/log/hadoop-mapreduce"
-
-hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
-hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
-
-#users and groups
-hbase_user = config['configurations']['hbase-env']['hbase_user']
-smoke_user =  config['configurations']['cluster-env']['smokeuser']
-gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
-gmond_user = config['configurations']['ganglia-env']["gmond_user"]
-
-user_group = config['configurations']['cluster-env']['user_group']
-proxyuser_group =  default("/configurations/hadoop-env/proxyuser_group","users")
-
-#hosts
-hostname = config["hostname"]
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-rm_host = default("/clusterHostInfo/rm_host", [])
-slave_hosts = default("/clusterHostInfo/slave_hosts", [])
-oozie_servers = default("/clusterHostInfo/oozie_server", [])
-hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
-hive_server_host =  default("/clusterHostInfo/hive_server_host", [])
-hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
-hs_host = default("/clusterHostInfo/hs_host", [])
-jtnode_host = default("/clusterHostInfo/jtnode_host", [])
-namenode_host = default("/clusterHostInfo/namenode_host", [])
-zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
-ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
-
-has_sqoop_client = 'sqoop-env' in config['configurations']
-has_resourcemanager = not len(rm_host) == 0
-has_namenode = not len(namenode_host) == 0
-has_jt = not len(jtnode_host) == 0
-has_slaves = not len(slave_hosts) == 0
-has_oozie_server = not len(oozie_servers)  == 0
-has_hcat_server_host = not len(hcat_server_hosts)  == 0
-has_hive_server_host = not len(hive_server_host)  == 0
-has_hbase_masters = not len(hbase_master_hosts) == 0
-has_zk_host = not len(zk_hosts) == 0
-has_ganglia_server = not len(ganglia_server_hosts) == 0
-
-is_namenode_master = hostname in namenode_host
-is_jtnode_master = hostname in jtnode_host
-is_rmnode_master = hostname in rm_host
-is_hsnode_master = hostname in hs_host
-is_hbase_master = hostname in hbase_master_hosts
-is_slave = hostname in slave_hosts
-if has_ganglia_server:
-  ganglia_server_host = ganglia_server_hosts[0]
-
-hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
-ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
-
-smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
-if has_hbase_masters:
-  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
-#repo params
-repo_info = config['hostLevelParams']['repo_info']
-service_repo_info = default("/hostLevelParams/service_repo_info",None)
-
-user_to_groups_dict = collections.defaultdict(lambda:[user_group])
-user_to_groups_dict[smoke_user] = [proxyuser_group]
-if has_ganglia_server:
-  user_to_groups_dict[gmond_user] = [gmond_user]
-  user_to_groups_dict[gmetad_user] = [gmetad_user]
-
-user_to_gid_dict = collections.defaultdict(lambda:user_group)
-
-user_list = json.loads(config['hostLevelParams']['user_list'])
-group_list = json.loads(config['hostLevelParams']['group_list'])

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/repo_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/repo_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/repo_initialization.py
deleted file mode 100644
index fe86c6a..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/repo_initialization.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management import *
-import json
-from resource_management.core.system import System
-
-_UBUNTU_REPO_COMPONENTS = ["HDP", "main"]
-
-def _alter_repo(action, repo_string, repo_template):
-  """
-  @param action: "delete" or "create"
-  @param repo_string: e.g. "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
-  """
-  repo_dicts = json.loads(repo_string)
-
-  if not isinstance(repo_dicts, list):
-    repo_dicts = [repo_dicts]
-
-  for repo in repo_dicts:
-    if 'baseUrl' not in repo:
-      repo['baseUrl'] = None
-    if 'mirrorsList' not in repo:
-      repo['mirrorsList'] = None
-
-    Repository(repo['repoId'],
-               action = action,
-               base_url = repo['baseUrl'],
-               mirror_list = repo['mirrorsList'],
-               repo_file_name = repo['repoName'],
-               repo_template = repo_template,
-               components = _UBUNTU_REPO_COMPONENTS, # ubuntu specific
-    )
-
-def install_repos():
-  import params
-  template = "repo_suse_rhel.j2" if System.get_instance().os_family in ["suse", "redhat"] else "repo_ubuntu.j2"
-  _alter_repo("create", params.repo_info, template)
-  if params.service_repo_info:
-    _alter_repo("create", params.service_repo_info, template)

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
deleted file mode 100644
index cfebedf..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
+++ /dev/null
@@ -1,68 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-
-from resource_management import *
-
-def setup_java():
-  """
-  Installs the JDK using specific params that come from ambari-server
-  """
-  import params
-
-  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
-  java_dir = os.path.dirname(params.java_home)
-  java_exec = format("{java_home}/bin/java")
-  tmp_java_dir = format("{tmp_dir}/jdk")
-
-  if not params.jdk_name:
-    return
-
-  environment = {
-    "no_proxy": format("{ambari_server_hostname}")
-  }
-
-  Execute(format("mkdir -p {artifact_dir} ; curl -kf -x \"\" "
-                 "--retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}"),
-          environment = environment)
-
-  if params.jdk_name.endswith(".bin"):
-    chmod_cmd = ("chmod", "+x", jdk_curl_target)
-    install_cmd = format("mkdir -p {tmp_java_dir} && cd {tmp_java_dir} && echo A | {jdk_curl_target} -noregister && sudo cp -r {tmp_java_dir}/* {java_dir}")
-  elif params.jdk_name.endswith(".gz"):
-    chmod_cmd = ("chmod","a+x", java_dir)
-    install_cmd = format("mkdir -p {tmp_java_dir} && cd {tmp_java_dir} && tar -xf {jdk_curl_target} && sudo cp -r {tmp_java_dir}/* {java_dir}")
-
-  Directory(java_dir
-  )
-  
-  Execute(chmod_cmd,
-          not_if = format("test -e {java_exec}"),
-          sudo = True    
-  )
-
-  Execute(install_cmd,
-          not_if = format("test -e {java_exec}")
-  )
-
-def install_packages():
-  Package(['unzip'])
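
One latent hazard in setup_java above: if jdk_name ends in neither ".bin" nor
".gz", chmod_cmd and install_cmd are never bound and the later Execute calls
raise NameError. A hedged sketch of the same dispatch with an explicit
failure branch (command strings abbreviated, not the full install pipeline):

    def build_install_cmds(jdk_name, jdk_curl_target, tmp_java_dir, java_dir):
        if jdk_name.endswith(".bin"):
            chmod_cmd = ("chmod", "+x", jdk_curl_target)
            install_cmd = "cd %s && echo A | %s -noregister" % (
                tmp_java_dir, jdk_curl_target)
        elif jdk_name.endswith(".gz"):
            chmod_cmd = ("chmod", "a+x", java_dir)
            install_cmd = "cd %s && tar -xf %s" % (tmp_java_dir, jdk_curl_target)
        else:
            raise ValueError("unsupported JDK archive: %s" % jdk_name)
        return chmod_cmd, install_cmd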

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/templates/repo_suse_rhel.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/templates/repo_suse_rhel.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/templates/repo_suse_rhel.j2
deleted file mode 100644
index d486f89..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/templates/repo_suse_rhel.j2
+++ /dev/null
@@ -1,7 +0,0 @@
-[{{repo_id}}]
-name={{repo_file_name}}
-{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}
-
-path=/
-enabled=1
-gpgcheck=0

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/templates/repo_ubuntu.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/templates/repo_ubuntu.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/templates/repo_ubuntu.j2
deleted file mode 100644
index 52d4c9a..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/templates/repo_ubuntu.j2
+++ /dev/null
@@ -1 +0,0 @@
-{{package_type}} {{base_url}} {{components}}
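
Rendered, the two templates above yield a yum/zypper .repo stanza and a
one-line APT source respectively. A sketch driving them with the jinja2
library directly (repo values hypothetical):

    from jinja2 import Template

    yum_tpl = Template(
        "[{{repo_id}}]\n"
        "name={{repo_file_name}}\n"
        "{% if mirror_list %}mirrorlist={{mirror_list}}"
        "{% else %}baseurl={{base_url}}{% endif %}\n"
        "enabled=1\ngpgcheck=0\n")
    print(yum_tpl.render(repo_id="HDP-1.3", repo_file_name="HDP",
                         mirror_list=None,
                         base_url="http://example.invalid/hdp"))

    apt_tpl = Template("{{package_type}} {{base_url}} {{components}}")
    print(apt_tpl.render(package_type="deb",
                         base_url="http://example.invalid/hdp",
                         components="HDP main"))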

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
deleted file mode 100644
index 14b9d99..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-class BeforeRestartHook(Hook):
-
-  def hook(self, env):
-    self.run_custom_hook('before-START')
-
-if __name__ == "__main__":
-  BeforeRestartHook().execute()
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/files/checkForFormat.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/files/checkForFormat.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/files/checkForFormat.sh
deleted file mode 100644
index c5af7de..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/files/checkForFormat.sh
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export hdfs_user=$1
-shift
-export conf_dir=$1
-shift
-export mark_dir=$1
-shift
-export name_dirs=$*
-
-export EXIT_CODE=0
-export command="namenode -format"
-export list_of_non_empty_dirs=""
-
-mark_file=/var/run/hadoop/hdfs/namenode-formatted
-if [[ -f ${mark_file} ]] ; then
-  sudo rm -f ${mark_file}
-  sudo mkdir -p ${mark_dir}
-fi
-
-if [[ ! -d $mark_dir ]] ; then
-  for dir in `echo $name_dirs | tr ',' ' '` ; do
-    echo "NameNode Dirname = $dir"
-    cmd="ls $dir | wc -l  | grep -q ^0$"
-    eval $cmd
-    if [[ $? -ne 0 ]] ; then
-      (( EXIT_CODE = $EXIT_CODE + 1 ))
-      list_of_non_empty_dirs="$list_of_non_empty_dirs $dir"
-    fi
-  done
-
-  if [[ $EXIT_CODE == 0 ]] ; then
-    sudo su ${hdfs_user} - -s /bin/bash -c "yes Y | hadoop --config ${conf_dir} ${command}"
-    (( EXIT_CODE = $EXIT_CODE | $? ))
-  else
-    echo "ERROR: Namenode directory(s) is non empty. Will not format the namenode. List of non-empty namenode dirs ${list_of_non_empty_dirs}"
-  fi
-else
-  echo "${mark_dir} exists. Namenode DFS already formatted"
-fi
-
-exit $EXIT_CODE
-
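
The guard above refuses to format a NameNode whose name directories already
hold data; the shell version shells out to ls | wc -l per directory. The same
emptiness check, sketched in Python (paths hypothetical):

    import os

    def non_empty_dirs(name_dirs):
        """Return the comma-separated name dirs that contain any entries."""
        return [d for d in name_dirs.split(",")
                if os.path.isdir(d) and os.listdir(d)]

    dirty = non_empty_dirs("/hadoop/hdfs/namenode,/mnt/hadoop/hdfs/namenode")
    if dirty:
        print("Will not format; non-empty dirs:", dirty)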

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/files/task-log4j.properties
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/files/task-log4j.properties b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/files/task-log4j.properties
deleted file mode 100644
index c8939fc..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/files/task-log4j.properties
+++ /dev/null
@@ -1,132 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-
-# Define some default values that can be overridden by system properties
-hadoop.root.logger=INFO,console
-hadoop.log.dir=.
-hadoop.log.file=hadoop.log
-
-#
-# Job Summary Appender 
-#
-# Use the following logger to send the summary to a separate file defined by
-# hadoop.mapreduce.jobsummary.log.file rolled daily:
-# hadoop.mapreduce.jobsummary.logger=INFO,JSA
-# 
-hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
-hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=ALL
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Rollover at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this 
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#
-# TaskLog Appender
-#
-
-#Default values
-hadoop.tasklog.taskid=null
-hadoop.tasklog.iscleanup=false
-hadoop.tasklog.noKeepSplits=4
-hadoop.tasklog.totalLogFileSize=100
-hadoop.tasklog.purgeLogSplits=true
-hadoop.tasklog.logsRetainHours=12
-
-log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
-log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
-log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
-log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
-
-log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
-log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-
-#
-# Rolling File Appender
-#
-
-#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Logfile size and 30-day backups
-#log4j.appender.RFA.MaxFileSize=1MB
-#log4j.appender.RFA.MaxBackupIndex=30
-
-#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-# Custom Logging levels
-
-hadoop.metrics.log.level=INFO
-#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
-#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
-
-# Jets3t library
-log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
-
-#
-# Null Appender
-# Trap security logger on the hadoop client side
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
deleted file mode 100644
index 6a78a34..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from shared_initialization import *
-
-class BeforeStartHook(Hook):
-
-  def hook(self, env):
-    import params
-
-    self.run_custom_hook('before-ANY')
-    env.set_params(params)
-    
-    setup_hadoop()
-    setup_database()
-    setup_configs()
-    create_javahome_symlink()
-
-if __name__ == "__main__":
-  BeforeStartHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
deleted file mode 100644
index 0b269c3..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
+++ /dev/null
@@ -1,143 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from resource_management.core.system import System
-import os
-
-config = Script.get_config()
-
-#security params
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
-#users and groups
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-
-user_group = config['configurations']['cluster-env']['user_group']
-
-#hosts
-hostname = config["hostname"]
-current_service = config['serviceName']
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-rm_host = default("/clusterHostInfo/rm_host", [])
-slave_hosts = default("/clusterHostInfo/slave_hosts", [])
-oozie_servers = default("/clusterHostInfo/oozie_server", [])
-hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
-hive_server_host =  default("/clusterHostInfo/hive_server_host", [])
-hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
-hs_host = default("/clusterHostInfo/hs_host", [])
-jtnode_host = default("/clusterHostInfo/jtnode_host", [])
-namenode_host = default("/clusterHostInfo/namenode_host", [])
-zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
-ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
-
-has_namenode = not len(namenode_host) == 0
-has_resourcemanager = not len(rm_host) == 0
-has_slaves = not len(slave_hosts) == 0
-has_oozie_server = not len(oozie_servers)  == 0
-has_hcat_server_host = not len(hcat_server_hosts)  == 0
-has_hive_server_host = not len(hive_server_host)  == 0
-has_hbase_masters = not len(hbase_master_hosts) == 0
-has_zk_host = not len(zk_hosts) == 0
-has_ganglia_server = not len(ganglia_server_hosts) == 0
-
-is_namenode_master = hostname in namenode_host
-is_jtnode_master = hostname in jtnode_host
-is_rmnode_master = hostname in rm_host
-is_hsnode_master = hostname in hs_host
-is_hbase_master = hostname in hbase_master_hosts
-is_slave = hostname in slave_hosts
-if has_ganglia_server:
-  ganglia_server_host = ganglia_server_hosts[0]
-#hadoop params
-if has_namenode:
-  hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-hadoop_lib_home = "/usr/lib/hadoop/lib"
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
-hadoop_home = "/usr"
-hadoop_bin = "/usr/lib/hadoop/bin"
-
-task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
-limits_conf_dir = "/etc/security/limits.d"
-
-hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
-hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
-#db params
-server_db_name = config['hostLevelParams']['db_name']
-db_driver_filename = config['hostLevelParams']['db_driver_filename']
-oracle_driver_url = config['hostLevelParams']['oracle_jdbc_url']
-mysql_driver_url = config['hostLevelParams']['mysql_jdbc_url']
-ambari_server_resources = config['hostLevelParams']['jdk_location']
-oracle_driver_symlink_url = format("{ambari_server_resources}oracle-jdbc-driver.jar")
-mysql_driver_symlink_url = format("{ambari_server_resources}mysql-jdbc-driver.jar")
-
-ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url']
-ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver']
-ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username']
-ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password']
-
-if has_namenode and 'mapred-env' in config['configurations']:
-  rca_enabled =  config['configurations']['mapred-env']['rca_enabled']
-else:
-  rca_enabled = False
-rca_disabled_prefix = "###"
-if rca_enabled == True:
-  rca_prefix = ""
-else:
-  rca_prefix = rca_disabled_prefix
-
-#hadoop-env.sh
-java_home = config['hostLevelParams']['java_home']
-if System.get_instance().os_family == "suse":
-  jsvc_path = "/usr/lib/bigtop-utils"
-else:
-  jsvc_path = "/usr/libexec/bigtop-utils"
-
-hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
-namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
-namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
-namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
-namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
-namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
-
-dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
-mapred_pid_dir_prefix = "/var/run/hadoop-mapreduce"
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-mapred_log_dir_prefix = hdfs_log_dir_prefix
-
-#taskcontroller.cfg
-
-mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
-
-dfs_hosts = default('/configurations/hdfs-site/dfs.hosts', None)
-
-#log4j.properties
-if 'mapred-env' in config['configurations'] and 'rca_properties' in config['configurations']['mapred-env']:
-  rca_properties = format(config['configurations']['mapred-env']['rca_properties'])
-
-if 'hdfs-log4j' in config['configurations']:
-  log4j_props = config['configurations']['hdfs-log4j']['content']
-  if 'mapreduce-log4j' in config['configurations']:
-    log4j_props += config['configurations']['mapreduce-log4j']['content']
-    if rca_enabled:
-      log4j_props += rca_properties
-else:
-  log4j_props = None
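
The tail of params.py above assembles log4j_props by concatenating the
per-service log4j payloads, with the RCA properties appended only when
rca_enabled is set. The same assembly against a plain dict (contents
hypothetical):

    configurations = {  # hypothetical subset of the command JSON
        "hdfs-log4j":      {"content": "# hdfs rules\n"},
        "mapreduce-log4j": {"content": "# mapreduce rules\n"},
        "mapred-env":      {"rca_enabled": True,
                            "rca_properties": "# rca rules\n"},
    }

    rca_enabled = configurations.get("mapred-env", {}).get("rca_enabled", False)
    log4j_props = None
    if "hdfs-log4j" in configurations:
        log4j_props = configurations["hdfs-log4j"]["content"]
        if "mapreduce-log4j" in configurations:
            log4j_props += configurations["mapreduce-log4j"]["content"]
            if rca_enabled:
                log4j_props += configurations["mapred-env"]["rca_properties"]
    print(log4j_props)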

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
deleted file mode 100644
index 8f8078f..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
+++ /dev/null
@@ -1,189 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-
-from resource_management import *
-
-def setup_hadoop():
-  """
-  Set up Hadoop files and directories
-  """
-  import params
-
-  Execute("/bin/echo 0 > /selinux/enforce",
-          only_if="test -f /selinux/enforce"
-  )
-
-  if params.current_service == "HDFS":
-    install_snappy()
-
-
-  if params.has_namenode:
-    #directories
-    Directory(params.hdfs_log_dir_prefix,
-              recursive=True,
-              owner='root',
-              group=params.user_group,
-              mode=0775
-    )
-    Directory(params.hadoop_pid_dir_prefix,
-              recursive=True,
-              owner='root',
-              group='root'
-    )
-
-    #files
-    if params.security_enabled:
-      tc_owner = "root"
-    else:
-      tc_owner = params.hdfs_user
-
-      File(os.path.join(params.hadoop_conf_dir, 'commons-logging.properties'),
-           owner=tc_owner,
-           content=Template("commons-logging.properties.j2")
-      )
-
-    health_check_template = "health_check" #for stack 1 use 'health_check'
-    File(os.path.join(params.hadoop_conf_dir, "health_check"),
-         owner=tc_owner,
-         content=Template(health_check_template + ".j2")
-    )
-
-    log4j_filename = os.path.join(params.hadoop_conf_dir, "log4j.properties")
-    if (params.log4j_props != None):
-      File(log4j_filename,
-           mode=0644,
-           group=params.user_group,
-           owner=params.hdfs_user,
-           content=params.log4j_props
-      )
-    elif (os.path.exists(format("{params.hadoop_conf_dir}/log4j.properties"))):
-      File(log4j_filename,
-           mode=0644,
-           group=params.user_group,
-           owner=params.hdfs_user,
-      )
-
-    File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
-         owner=params.hdfs_user,
-         content=Template("hadoop-metrics2.properties.j2")
-    )
-
-def setup_database():
-  """
-  Load DB
-  """
-  import params
-  db_driver_dload_cmd = ""
-  environment = {
-    "no_proxy": format("{ambari_server_hostname}")
-  }
-  if params.server_db_name == 'oracle' and params.oracle_driver_url != "":
-    db_driver_dload_cmd = format(
-      "curl -kf -x \"\" --retry 5 {oracle_driver_symlink_url}"
-      " -o {hadoop_lib_home}/{db_driver_filename}")
-  elif params.server_db_name == 'mysql' and params.mysql_driver_url != "":
-    db_driver_dload_cmd = format(
-      "curl -kf -x \"\" --retry 5 {mysql_driver_symlink_url} "
-      "-o {hadoop_lib_home}/{db_driver_filename}")
-
-  if db_driver_dload_cmd:
-    Execute(db_driver_dload_cmd,
-            not_if =format("test -e {hadoop_lib_home}/{db_driver_filename}"),
-            environment = environment
-    )
-
-
-def setup_configs():
-  """
-  Creates configs for the HDFS and MapReduce services
-  """
-  import params
-
-  if params.has_namenode:
-    File(params.task_log4j_properties_location,
-         content=StaticFile("task-log4j.properties"),
-         mode=0755
-    )
-
-    Link('/usr/lib/hadoop/lib/hadoop-tools.jar',
-         to = '/usr/lib/hadoop/hadoop-tools.jar'
-    )
-
-    if os.path.exists(os.path.join(params.hadoop_conf_dir, 'configuration.xsl')):
-      File(os.path.join(params.hadoop_conf_dir, 'configuration.xsl'),
-           owner=params.hdfs_user,
-           group=params.user_group
-      )
-
-    if os.path.exists(os.path.join(params.hadoop_conf_dir, 'masters')):
-      File(os.path.join(params.hadoop_conf_dir, 'masters'),
-           owner=params.hdfs_user,
-           group=params.user_group
-      )
-
-  # generate_include_file()
-
-def generate_include_file():
-  import params
-
-  if params.has_namenode and params.dfs_hosts and params.has_slaves:
-    include_hosts_list = params.slave_hosts
-    File(params.dfs_hosts,
-         content=Template("include_hosts_list.j2"),
-         owner=params.hdfs_user,
-         group=params.user_group
-    )
-
-
-def install_snappy():
-  import params
-
-  snappy_so = "libsnappy.so"
-  so_target_dir_x86 = format("{hadoop_lib_home}/native/Linux-i386-32")
-  so_target_dir_x64 = format("{hadoop_lib_home}/native/Linux-amd64-64")
-  so_target_x86 = format("{so_target_dir_x86}/{snappy_so}")
-  so_target_x64 = format("{so_target_dir_x64}/{snappy_so}")
-  so_src_dir_x86 = format("{hadoop_home}/lib")
-  so_src_dir_x64 = format("{hadoop_home}/lib64")
-  so_src_x86 = format("{so_src_dir_x86}/{snappy_so}")
-  so_src_x64 = format("{so_src_dir_x64}/{snappy_so}")
-  if params.has_namenode:
-    Directory([so_target_dir_x86, so_target_dir_x64],
-              recursive=True,
-    )    
-    Link(so_target_x86,
-         to=so_src_x86,
-    )
-    Link(so_target_x64,
-         to=so_src_x64,
-    )
-
-
-def create_javahome_symlink():
-  if os.path.exists("/usr/jdk/jdk1.6.0_31") and not os.path.exists("/usr/jdk64/jdk1.6.0_31"):
-    Directory("/usr/jdk64/",
-         recursive=True,
-    )
-    Link("/usr/jdk/jdk1.6.0_31",
-         to="/usr/jdk64/jdk1.6.0_31",
-    )
-
-
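
install_snappy above lays 32- and 64-bit libsnappy.so links under the Hadoop
native directories. The filesystem effect, sketched with the standard library
against a scratch root so it is safe to run (real paths live under /usr):

    import os, tempfile

    root = tempfile.mkdtemp()
    pairs = [  # (link dir, source dir) per architecture
        ("lib/hadoop/lib/native/Linux-i386-32",  "lib"),
        ("lib/hadoop/lib/native/Linux-amd64-64", "lib64"),
    ]
    for link_dir, src_dir in pairs:
        os.makedirs(os.path.join(root, link_dir))
        # Dangling in this sketch; on a real host the source libs exist.
        os.symlink(os.path.join("/usr", src_dir, "libsnappy.so"),
                   os.path.join(root, link_dir, "libsnappy.so"))
    print(os.listdir(os.path.join(root, pairs[0][0])))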

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/commons-logging.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/commons-logging.properties.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/commons-logging.properties.j2
deleted file mode 100644
index 2197ba5..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/commons-logging.properties.j2
+++ /dev/null
@@ -1,43 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#/*
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-
-#Logging Implementation
-
-#Log4J
-org.apache.commons.logging.Log=org.apache.commons.logging.impl.Log4JLogger
-
-#JDK Logger
-#org.apache.commons.logging.Log=org.apache.commons.logging.impl.Jdk14Logger

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/exclude_hosts_list.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/exclude_hosts_list.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/exclude_hosts_list.j2
deleted file mode 100644
index 1adba80..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/exclude_hosts_list.j2
+++ /dev/null
@@ -1,21 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-{% for host in hdfs_exclude_file %}
-{{host}}
-{% endfor %}
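
The exclude-hosts template above is a plain loop that emits one hostname per
line. Rendering it with jinja2 directly (hostnames hypothetical):

    from jinja2 import Template

    tpl = Template("{% for host in hdfs_exclude_file %}{{host}}\n{% endfor %}")
    print(tpl.render(hdfs_exclude_file=["worker-1.example.invalid",
                                        "worker-2.example.invalid"]), end="")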

