ambari-commits mailing list archives

From alejan...@apache.org
Subject ambari git commit: AMBARI-9811. Spark: spark-defaults.conf file does not contain added Custom properties from UI (Gautam Borad via alejandro)
Date Fri, 27 Feb 2015 21:08:11 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 df70c0799 -> 5ea6b198b


AMBARI-9811. Spark: spark-defaults.conf file does not contain added Custom properties from UI (Gautam Borad via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5ea6b198
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5ea6b198
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5ea6b198

Branch: refs/heads/branch-2.0.0
Commit: 5ea6b198bdf57275fa6e7b877fdba61c54ceb7af
Parents: df70c07
Author: Alejandro Fernandez <afernandez@hortonworks.com>
Authored: Fri Feb 27 13:07:45 2015 -0800
Committer: Alejandro Fernandez <afernandez@hortonworks.com>
Committed: Fri Feb 27 13:07:45 2015 -0800

----------------------------------------------------------------------
 .../SPARK/1.2.0.2.2/configuration/spark-env.xml | 11 +++++++++-
 .../SPARK/1.2.0.2.2/metainfo.xml                |  1 +
 .../SPARK/1.2.0.2.2/package/scripts/params.py   | 21 ++++++++++++++++++++
 .../1.2.0.2.2/package/scripts/setup_spark.py    | 11 ++++++++++
 4 files changed, 43 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5ea6b198/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
index 15a71cd..96341d8 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
@@ -86,7 +86,16 @@ SPARK_NICENESS=0
 export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
 export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}
 
-    </value>
+# The java implementation to use.
+export JAVA_HOME={{java_home}}
+
+if [ -d "/etc/tez/conf/" ]; then
+  export TEZ_CONF_DIR=/etc/tez/conf
+else
+  export TEZ_CONF_DIR=
+fi
+
+</value>
   </property>
 
 </configuration>
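
For reference, the {{java_home}} and {{hadoop_home}} tokens in this property value are placeholders that Ambari fills in from params.py when it renders spark-env.sh on each host. A minimal sketch of that substitution step, using a plain string replace instead of Ambari's actual Jinja2-based template rendering (the render helper and the sample values are illustrative only):

    # Sketch: substitute {{...}} placeholders the way template rendering
    # would; Ambari itself uses a Jinja2-based template, not this helper.
    def render_placeholders(template, values):
        for name, value in values.items():
            template = template.replace('{{%s}}' % name, value)
        return template

    rendered = render_placeholders(
        'export JAVA_HOME={{java_home}}\n'
        'export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n',
        {'java_home': '/usr/jdk64/jdk1.7.0_67',
         'hadoop_home': '/usr/hdp/current/hadoop-client'})
    print(rendered)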

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ea6b198/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/metainfo.xml b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/metainfo.xml
index 95df6e5..ce8ad7a 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/metainfo.xml
@@ -127,6 +127,7 @@
         <config-type>spark-env</config-type>
         <config-type>spark-log4j-properties</config-type>
         <config-type>spark-metrics-properties</config-type>
+        <config-type>spark-javaopts-properties</config-type>
       </configuration-dependencies>
 
       <commandScript>
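
Declaring spark-javaopts-properties as a configuration dependency tells Ambari that the Spark service consumes that config type, so changes to it flag Spark components for restart and the type is delivered to the service scripts. Once declared, the scripts can read it like any other type; a hedged fragment in the style of params.py (the 'content' key is an assumption, not confirmed by this commit):

    # Sketch: with the config type declared in metainfo.xml, the package
    # scripts can look it up in the command configurations like any other
    # type. The 'content' key here is illustrative only.
    spark_javaopts = config['configurations'].get(
        'spark-javaopts-properties', {}).get('content', '')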

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ea6b198/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 9e0253c..3ad1098 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -47,6 +47,7 @@ stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_sta
 
 if stack_is_hdp22_or_further:
   hadoop_home = "/usr/hdp/current/hadoop-client"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   spark_conf = '/etc/spark/conf'
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir
@@ -65,10 +66,14 @@ else:
 
 java_home = config['hostLevelParams']['java_home']
 hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 
 spark_user = status_params.spark_user
 spark_group = status_params.spark_group
 user_group = status_params.user_group
+spark_hdfs_user_dir = format("/user/{spark_user}")
 spark_history_server_pid_file = status_params.spark_history_server_pid_file
 
 spark_history_server_start = format("{spark_home}/sbin/start-history-server.sh")
@@ -88,6 +93,7 @@ else:
 
 # spark-defaults params
 spark_yarn_historyServer_address = default(spark_history_server_host, "localhost")
+
 spark_yarn_applicationMaster_waitTries = default(
   "/configurations/spark-defaults/spark.yarn.applicationMaster.waitTries", '10')
 spark_yarn_submit_file_replication = default("/configurations/spark-defaults/spark.yarn.submit.file.replication", '3')
@@ -132,3 +138,18 @@ spark_kerberos_keytab =  config['configurations']['spark-defaults']['spark.histo
 spark_kerberos_principal =  config['configurations']['spark-defaults']['spark.history.kerberos.principal']
 if security_enabled:
   spark_principal = spark_kerberos_principal.replace('_HOST',spark_history_server_host.lower())
+
+
+
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_principal_name if security_enabled else hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
+)
\ No newline at end of file
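
The functools.partial block above pre-binds the environment-wide keyword arguments (conf dir, HDFS user, security settings) once, so call sites such as setup_spark.py only name what varies per directory. A self-contained sketch of the same pattern with a stand-in function (the real HdfsDirectory resource comes from Ambari's resource_management library; the stub below exists only to make the example runnable):

    import functools

    # Stand-in for the real HdfsDirectory resource.
    def hdfs_directory(path, action=None, owner=None, mode=None,
                       conf_dir=None, hdfs_user=None):
        print(path, action, owner, oct(mode), conf_dir, hdfs_user)

    # Bind the shared arguments once...
    HdfsDirectory = functools.partial(
        hdfs_directory,
        conf_dir='/etc/hadoop/conf',
        hdfs_user='hdfs')

    # ...so each call site passes only the per-directory arguments,
    # mirroring the HdfsDirectory call added in setup_spark.py.
    HdfsDirectory('/user/spark', action='create', owner='spark', mode=0o775)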

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ea6b198/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index f73011b..382dc88 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -38,6 +38,11 @@ def setup_spark(env):
             group=params.user_group,
             recursive=True
   )
+  params.HdfsDirectory(params.spark_hdfs_user_dir,
+                       action="create",
+                       owner=params.spark_user,
+                       mode=0775
+  )
 
   file_path = params.spark_conf + '/spark-defaults.conf'
   create_file(file_path)
@@ -101,6 +106,10 @@ def get_hive_config():
 def spark_properties(params):
   spark_dict = dict()
 
+  all_spark_config  = params.config['configurations']['spark-defaults']
+  #Add all configs unfiltered first to handle Custom case.
+  spark_dict = all_spark_config.copy()
+
   spark_dict['spark.yarn.executor.memoryOverhead'] = params.spark_yarn_executor_memoryOverhead
   spark_dict['spark.yarn.driver.memoryOverhead'] = params.spark_yarn_driver_memoryOverhead
   spark_dict['spark.yarn.applicationMaster.waitTries'] = params.spark_yarn_applicationMaster_waitTries
@@ -121,6 +130,7 @@ def spark_properties(params):
   spark_dict['spark.driver.extraJavaOptions'] = params.spark_driver_extraJavaOptions
   spark_dict['spark.yarn.am.extraJavaOptions'] = params.spark_yarn_am_extraJavaOptions
 
+
   return spark_dict
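
The three lines added at the top of spark_properties are the heart of the AMBARI-9811 fix: the function previously started from an empty dict and copied over only a fixed list of known keys, so any custom property a user added under spark-defaults in the UI never reached the generated spark-defaults.conf. Seeding spark_dict with a copy of the entire spark-defaults config first preserves those custom entries, while the explicit assignments that follow still override the managed keys with their computed values. A minimal illustration of that copy-then-override order:

    # Sketch: custom keys survive because everything is copied first;
    # managed keys are then overwritten with computed values.
    all_spark_config = {
        'spark.custom.setting': 'kept-as-is',         # added via the UI
        'spark.yarn.executor.memoryOverhead': '256',  # stale value
    }
    spark_dict = all_spark_config.copy()
    spark_dict['spark.yarn.executor.memoryOverhead'] = '384'  # override
    assert spark_dict['spark.custom.setting'] == 'kept-as-is'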
 
 
@@ -184,6 +194,7 @@ def get_hdp_version():
       'Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
 
   hdp_version = re.sub('hadoop-client - ', '', hdp_output)
+  hdp_version = hdp_version.rstrip()
   match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
 
   if match is None:
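
The added rstrip() matters because hdp_output is captured from an external command and ends with a newline; without stripping, that newline would be carried along in the hdp_version string handed to the caller (re.match itself anchors at the start of the string, so the match would still succeed). A quick illustration with a sample output line (the exact command output format is inferred from the re.sub pattern above):

    import re

    hdp_output = 'hadoop-client - 2.2.0.0-2041\n'  # sample captured output
    hdp_version = re.sub('hadoop-client - ', '', hdp_output)
    hdp_version = hdp_version.rstrip()             # drop trailing newline
    assert re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
    assert hdp_version == '2.2.0.0-2041'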

