ambari-commits mailing list archives

From: dmitriu...@apache.org
Subject: [2/2] ambari git commit: AMBARI-12375. RU fails for ZooKeeper only cluster (dlysnichenko)
Date: Fri, 10 Jul 2015 18:00:06 GMT
AMBARI-12375. RU fails for ZooKeeper only cluster (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eda2f90b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eda2f90b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eda2f90b

Branch: refs/heads/trunk
Commit: eda2f90bbd745cebacbbb1c414c1652ba282cb3c
Parents: 8e5c540
Author: Lisnichenko Dmitro <dlysnichenko@hortonworks.com>
Authored: Fri Jul 10 20:58:45 2015 +0300
Committer: Lisnichenko Dmitro <dlysnichenko@hortonworks.com>
Committed: Fri Jul 10 20:59:50 2015 +0300

----------------------------------------------------------------------
 .../state/stack/upgrade/ClusterGrouping.java    |  1 -
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  4 +++-
 .../HDP/2.0.6/hooks/before-ANY/scripts/hook.py  |  3 ++-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  4 +++-
 .../2.0.6/hooks/before-START/scripts/hook.py    |  3 ++-
 .../2.0.6/hooks/before-START/scripts/params.py  |  7 +++----
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     | 20 ++++++++++----------
 7 files changed, 23 insertions(+), 19 deletions(-)
----------------------------------------------------------------------
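
The fix comes down to three things visible in the diff below: ClusterGrouping drops the CONFIGURE case from the server-side-action stage wrapping, the HDP 2.0.6 hook scripts resolve hadoop_conf_dir and run Hadoop-specific setup only when the cluster actually has a NameNode, and the hadoop-env.sh configure task in upgrade-2.3.xml moves from a cluster-wide execute-stage into the NAMENODE component's pre-upgrade section, so it is never scheduled on a ZooKeeper-only cluster. Below is a rough, self-contained sketch of the has_namenode guard pattern; conf_select/hdp_select and the real hook functions belong to Ambari's resource_management layer and are replaced here with placeholder stubs, and the hostnames are made up.

# Hypothetical, simplified sketch of the guard pattern this patch applies in the
# HDP 2.0.6 hook scripts; the stubs below are placeholders, not the real Ambari API.

def get_hadoop_conf_dir():
    # Stand-in for conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True).
    return "/usr/hdp/current/hadoop-client/conf"

def build_params(cluster_host_info):
    # namenode_host is empty on a ZooKeeper-only cluster.
    namenode_hosts = cluster_host_info.get("namenode_host", [])
    has_namenode = len(namenode_hosts) > 0
    params = {"has_namenode": has_namenode}
    if has_namenode:
        # Only resolve Hadoop paths when HDFS is actually part of the cluster;
        # resolving them unconditionally is what made RU fail without HDFS.
        params["hadoop_conf_dir"] = get_hadoop_conf_dir()
    return params

def before_any_hook(params):
    # Hadoop-specific steps are skipped on clusters without a NameNode.
    if params["has_namenode"]:
        print("writing hadoop-env.sh into " + params["hadoop_conf_dir"])
    else:
        print("no NameNode in this cluster; skipping hadoop-env.sh setup")

if __name__ == "__main__":
    before_any_hook(build_params({"zookeeper_hosts": ["zk1.example.com"]}))  # ZK-only: skips
    before_any_hook(build_params({"namenode_host": ["nn1.example.com"]}))    # with HDFS: resolves conf dir
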


http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
index 5b6bc50..ad84210 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
@@ -128,7 +128,6 @@ public class ClusterGrouping extends Grouping {
               wrapper = getManualStageWrapper(ctx, execution);
               break;
 
-            case CONFIGURE:
             case SERVER_ACTION:
               wrapper = new StageWrapper(
                   StageWrapper.Type.SERVER_SIDE_ACTION,

http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 11de040..7891a27 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -37,7 +37,6 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec")
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
 # HDP 2.2+ params
@@ -88,3 +87,6 @@ user_group = config['configurations']['cluster-env']['user_group']
 
 namenode_host = default("/clusterHostInfo/namenode_host", [])
 has_namenode = not len(namenode_host) == 0
+
+if has_namenode:
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)

http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py
index a90c3b5..864b222 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py
@@ -27,7 +27,8 @@ class BeforeAnyHook(Hook):
     env.set_params(params)
 
     setup_users()
-    setup_hadoop_env()
+    if params.has_namenode:
+      setup_hadoop_env()
 
 if __name__ == "__main__":
   BeforeAnyHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 26a2f28..602f630 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -84,7 +84,6 @@ mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 # which would cause a lot of problems when writing out hadoop-env.sh; instead
 # force the use of "current" in the hook
 hadoop_home = hdp_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)
 
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
@@ -173,6 +172,9 @@ has_oozie_server = not len(oozie_servers) == 0
 has_falcon_server_hosts = not len(falcon_server_hosts) == 0
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 
+if has_namenode:
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+
 hbase_tmp_dir = "/tmp/hbase-hbase"
 
 proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")

http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
index f21e4b1..064b535 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
@@ -33,7 +33,8 @@ class BeforeStartHook(Hook):
     setup_hadoop()
     setup_configs()
     create_javahome_symlink()
-    create_topology_script_and_mapping()
+    if params.has_namenode:
+      create_topology_script_and_mapping()
 
 if __name__ == "__main__":
   BeforeStartHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 790769a..3ddefe3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -42,7 +42,6 @@ hadoop_lib_home = hdp_select.get_hadoop_dir("lib")
 hadoop_bin = hdp_select.get_hadoop_dir("sbin")
 hadoop_home = '/usr'
 create_lib_snappy_symlinks = True
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 default_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
 
 # HDP 2.2+ params
@@ -109,10 +108,10 @@ if has_metric_collector:
 
 if has_namenode:
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
-
-task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
 
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hbase_tmp_dir = "/tmp/hbase-hbase"
 #db params

http://git-wip-us.apache.org/repos/asf/ambari/blob/eda2f90b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
index a202fd7..20e1a37 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
@@ -63,16 +63,6 @@
         </task>
       </execute-stage>
 
-      <execute-stage service="HDFS" component="NAMENODE" title="Modify hadoop-env.sh">
-        <task xsi:type="configure">
-          <type>hadoop-env</type>
-          <replace key="content" find="# Add libraries required by nodemanager" replace-with=""
/>
-          <replace key="content" find="MAPREDUCE_LIBS={{mapreduce_libs_path}}" replace-with=""
/>
-          <replace key="content" find=":${MAPREDUCE_LIBS}" replace-with="" />
-          <replace key="content" find=":/usr/hdp/current/tez-client/*:/usr/hdp/current/tez-client/lib/*:/etc/tez/conf/"
replace-with="" />
-          <replace key="content" find=":/usr/hdp/current/tez-client/*:/usr/hdp/current/tez-client/lib/*:/usr/hdp/current/tez-client/conf/"
replace-with="" />
-        </task>
-      </execute-stage>
     </group>
 
     <group name="ZOOKEEPER" title="ZooKeeper">
@@ -472,6 +462,16 @@
       <component name="NAMENODE">
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
         <pre-upgrade>
+
+          <task xsi:type="configure" summary="Modify hadoop-env.sh">
+            <type>hadoop-env</type>
+            <replace key="content" find="# Add libraries required by nodemanager" replace-with=""
/>
+            <replace key="content" find="MAPREDUCE_LIBS={{mapreduce_libs_path}}" replace-with=""
/>
+            <replace key="content" find=":${MAPREDUCE_LIBS}" replace-with="" />
+            <replace key="content" find=":/usr/hdp/current/tez-client/*:/usr/hdp/current/tez-client/lib/*:/etc/tez/conf/"
replace-with="" />
+            <replace key="content" find=":/usr/hdp/current/tez-client/*:/usr/hdp/current/tez-client/lib/*:/usr/hdp/current/tez-client/conf/"
replace-with="" />
+          </task>
+
           <task xsi:type="configure">
             <condition type="ranger-hdfs-plugin-properties" key="ranger-hdfs-plugin-enabled"
value="Yes">
               <type>hdfs-site</type>

