ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From smoha...@apache.org
Subject [1/2] ambari git commit: Ambari-10669. Update ambaripreupload script to support accepting target path on the cmd line (Ivan Mitic via smohanty)
Date Wed, 22 Apr 2015 20:12:59 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk b1196605e -> a93a31265


Ambari-10669. Update ambaripreupload script to support accepting target path on the cmd line
(Ivan Mitic via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/606ac375
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/606ac375
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/606ac375

Branch: refs/heads/trunk
Commit: 606ac375a5ed6f4a8e090306b93c47d3789e6c8e
Parents: b119660
Author: Sumit Mohanty <smohanty@hortonworks.com>
Authored: Wed Apr 22 12:30:04 2015 -0700
Committer: Sumit Mohanty <smohanty@hortonworks.com>
Committed: Wed Apr 22 12:30:04 2015 -0700

----------------------------------------------------------------------
 .../main/resources/scripts/Ambaripreupload.py   | 41 +++++++++++---------
 1 file changed, 23 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/606ac375/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 42a7faa..1c882cd 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -21,6 +21,7 @@ limitations under the License.
 # export PYTHONPATH=/usr/lib/python2.6/site-packages 
 import glob
 from logging import thread
+import sys
 import os
 import re
 import tempfile
@@ -230,7 +231,13 @@ def getPropertyValueFromConfigXMLFile(xmlfile, name, defaultValue=None):
         else:
           return ''
   return defaultValue
- 
+
+# See if hdfs path prefix is provided on the command line. If yes, use that value, if no
+# use empty string as default.
+hdfs_path_prefix = ""
+if len(sys.argv) == 2:
+    hdfs_path_prefix = sys.argv[1]
+
 hadoop_conf_dir = params.hadoop_conf_dir
 fsdefaultName =  getPropertyValueFromConfigXMLFile("/etc/hadoop/conf/core-site.xml", "fs.defaultFS")
 if fsdefaultName is None:
@@ -258,18 +265,16 @@ with Environment() as env:
            sudo = True,
            )
 
-
-  oozie_root = 'oozie-server'
-  oozie_setup_sh = format("/usr/hdp/current/{oozie_root}/bin/oozie-setup.sh")
-  oozie_shared_lib = format("/usr/hdp/current/{oozie_root}/share")
+  oozie_shared_lib = format("/usr/hdp/current/oozie-server/share")
   oozie_user = 'oozie'
-  oozie_hdfs_user_dir = format("/user/{oozie_user}")
+  oozie_hdfs_user_dir = format("{hdfs_path_prefix}/user/{oozie_user}")
   kinit_if_needed = ''
- 
-  put_shared_lib_to_hdfs_cmd = format("{oozie_setup_sh} sharelib create -fs {fs_root} -locallib {oozie_shared_lib}")
- 
+
+  #Ideally, we would want to run: put_shared_lib_to_hdfs_cmd = format("{oozie_setup_sh} sharelib create -fs {fs_root} -locallib {oozie_shared_lib}")
+  #However given that oozie_setup_sh does not support an arbitrary hdfs path prefix, we are simulating the same command below
+  put_shared_lib_to_hdfs_cmd = format("hadoop --config {hadoop_conf_dir} dfs -copyFromLocal {oozie_shared_lib}/lib/** {oozie_hdfs_user_dir}/share/lib/lib_20150212065327")
+
   oozie_cmd = format("{put_shared_lib_to_hdfs_cmd} ; hadoop --config {hadoop_conf_dir} dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
-  not_if_command = format("{kinit_if_needed} hadoop --config {hadoop_conf_dir} dfs -ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} else {{exit 1}}}}'")
 
   #Check if destination folder already exists
   does_hdfs_file_exist_cmd = "fs -ls %s" % format("{oozie_hdfs_user_dir}/share")
@@ -282,19 +287,19 @@ with Environment() as env:
     )
   except Fail:
     #If dir does not exist create it and put files there
-    HdfsDirectory(format("{oozie_hdfs_user_dir}/share"),
+    HdfsDirectory(format("{oozie_hdfs_user_dir}/share/lib/lib_20150212065327"),
                   action="create",
                   owner=oozie_user,
                   mode=0555,
                   conf_dir=params.hadoop_conf_dir,
                   hdfs_user=params.hdfs_user,
                   )
-    Execute( oozie_cmd, user = params.oozie_user, not_if = not_if_command,
+    Execute( oozie_cmd, user = params.oozie_user, not_if = None,
              path = params.execute_path )
 
-  copy_tarballs_to_hdfs("/usr/hdp/current/hadoop-client/mapreduce.tar.gz", "wasb:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
-  copy_tarballs_to_hdfs("/usr/hdp/current/tez-client/lib/tez.tar.gz", "wasb:///hdp/apps/{{ hdp_stack_version }}/tez/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
-  copy_tarballs_to_hdfs("/usr/hdp/current/hive-client/hive.tar.gz", "wasb:///hdp/apps/{{ hdp_stack_version }}/hive/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
-  copy_tarballs_to_hdfs("/usr/hdp/current/pig-client/pig.tar.gz", "wasb:///hdp/apps/{{ hdp_stack_version }}/pig/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
-  copy_tarballs_to_hdfs("/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", "wasb:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
-  copy_tarballs_to_hdfs("/usr/hdp/current/sqoop-client/sqoop.tar.gz", "wasb:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+  copy_tarballs_to_hdfs("/usr/hdp/current/hadoop-client/mapreduce.tar.gz", hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/mapreduce/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+  copy_tarballs_to_hdfs("/usr/hdp/current/tez-client/lib/tez.tar.gz", hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/tez/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+  copy_tarballs_to_hdfs("/usr/hdp/current/hive-client/hive.tar.gz", hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/hive/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+  copy_tarballs_to_hdfs("/usr/hdp/current/pig-client/pig.tar.gz", hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/pig/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+  copy_tarballs_to_hdfs("/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/mapreduce/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+  copy_tarballs_to_hdfs("/usr/hdp/current/sqoop-client/sqoop.tar.gz", hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/sqoop/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)


Mime
View raw message