ambari-commits mailing list archives

From alejan...@apache.org
Subject ambari git commit: AMBARI-14330. In some stack service scripts "commandParams/version" is misused (Oliver Szabo via alejandro)
Date Mon, 14 Dec 2015 21:58:06 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.2 01e49ea93 -> fce013b54


AMBARI-14330. In some stack service scripts "commandParams/version" is misused (Oliver Szabo via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fce013b5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fce013b5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fce013b5

Branch: refs/heads/branch-2.2
Commit: fce013b54d94fb55a8953518332a099d4d1c01f3
Parents: 01e49ea
Author: Alejandro Fernandez <afernandez@hortonworks.com>
Authored: Mon Dec 14 13:56:31 2015 -0800
Committer: Alejandro Fernandez <afernandez@hortonworks.com>
Committed: Mon Dec 14 13:56:31 2015 -0800

----------------------------------------------------------------------
 .../0.5.0.2.2/package/scripts/params_linux.py   |  2 --
 .../SPARK/1.2.0.2.2/package/scripts/params.py   | 35 +++++++++++---------
 2 files changed, 19 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
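
Background on the pattern being fixed: as the comment in the SPARK hunk below notes, /commandParams/version is only defined for the RESTART commands of a stack upgrade, so service scripts should treat a missing value as "no upgrade version in flight" instead of back-filling it from hdp-select via get_hdp_version(). The sketch below is a minimal, self-contained illustration of that pattern; default() and format_hdp_stack_version() are simplified stand-ins for the real helpers in resource_management.libraries.functions, and command_json is an invented example payload.

# Minimal, self-contained sketch (not the Ambari library itself); the stand-ins
# only mirror the behaviour relevant to this patch.
command_json = {
  "commandParams": {
    # Only populated for RESTART commands issued during a stack upgrade,
    # e.g. "2.3.2.0-2950"; absent for ordinary start/stop/install commands.
    # "version": "2.3.2.0-2950",
  }
}

def default(path, default_value):
  """Walk an /a/b/c style path through the command JSON, like Ambari's default()."""
  node = command_json
  for part in path.strip("/").split("/"):
    if not isinstance(node, dict) or part not in node:
      return default_value
    node = node[part]
  return node

def format_hdp_stack_version(value):
  """Reduce '2.3.2.0-2950' to '2.3.2.0'; return '' when no version is known."""
  if not value:
    return ""
  return value.split("-")[0]

# The corrected usage: take the version only from /commandParams/version and
# leave it as None otherwise. Back-filling it from hdp-select output
# (get_hdp_version('knox-server')) reports a concrete version even for commands
# that are not part of an upgrade, which is the misuse being removed.
version = default("/commandParams/version", None)
version_formatted = format_hdp_stack_version(version)  # "" here; e.g. "2.3.2.0" during an upgrade
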


http://git-wip-us.apache.org/repos/asf/ambari/blob/fce013b5/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 46a6f9a..25e2569 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -40,8 +40,6 @@ tmp_dir = Script.get_tmp_dir()
 stack_name = default("/hostLevelParams/stack_name", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 version = default("/commandParams/version", None)
-if version is None:
-  version = get_hdp_version('knox-server')
 # E.g., 2.3.2.0
 version_formatted = format_hdp_stack_version(version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fce013b5/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index c4bbdc1..04410b1 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -55,6 +55,13 @@ host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 # New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
 version = default("/commandParams/version", None)
 
+# TODO! FIXME! Version check is not working as of today :
+#   $ yum list installed | grep hdp-select
+#   hdp-select.noarch                            2.2.1.0-2340.el6           @HDP-2.2
+# And hdp_stack_version returned from hostLevelParams/stack_version is : 2.2.0.0
+# Commenting out for time being
+#stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2.1.0') >= 0
+
 spark_conf = '/etc/spark/conf'
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
@@ -146,22 +153,18 @@ if security_enabled:
 # thrift server support - available on HDP 2.3 or higher
 spark_thrift_sparkconf = None
 spark_thrift_cmd_opts_properties = ''
-
-if 'spark-thrift-sparkconf' in config['configurations']:
-  if version is None: # set hdp version by hdp-select if "/commandParams/version" is missing
-    version = get_hdp_version('spark-thriftserver')
-  if version and compare_versions(format_hdp_stack_version(version), '2.3.2.0') >= 0 :
-    spark_thrift_sparkconf = config['configurations']['spark-thrift-sparkconf']
-    spark_thrift_cmd_opts_properties = config['configurations']['spark-env']['spark_thrift_cmd_opts']
-    if is_hive_installed:
-      # update default metastore client properties (async wait for metastore component) it is useful in case of
-      # blueprint provisioning when hive-metastore and spark-thriftserver is not on the same host.
-      spark_hive_properties.update({
-        'hive.metastore.client.connect.retry.delay' : config['configurations']['hive-site']['hive.metastore.client.connect.retry.delay'],
-        'hive.metastore.connect.retries' : config['configurations']['hive-site']['hive.metastore.connect.retries'],
-        'hive.metastore.client.socket.timeout' : config['configurations']['hive-site']['hive.metastore.client.socket.timeout']
-      })
-      spark_hive_properties.update(config['configurations']['spark-hive-site-override'])
+if has_spark_thriftserver and 'spark-thrift-sparkconf' in config['configurations']:
+  spark_thrift_sparkconf = config['configurations']['spark-thrift-sparkconf']
+  spark_thrift_cmd_opts_properties = config['configurations']['spark-env']['spark_thrift_cmd_opts']
+  if is_hive_installed:
+    # update default metastore client properties (async wait for metastore component) it is useful in case of
+    # blueprint provisioning when hive-metastore and spark-thriftserver is not on the same host.
+    spark_hive_properties.update({
+      'hive.metastore.client.connect.retry.delay' : config['configurations']['hive-site']['hive.metastore.client.connect.retry.delay'],
+      'hive.metastore.connect.retries' : config['configurations']['hive-site']['hive.metastore.connect.retries'],
+      'hive.metastore.client.socket.timeout' : config['configurations']['hive-site']['hive.metastore.client.socket.timeout']
+    })
+    spark_hive_properties.update(config['configurations']['spark-hive-site-override'])
 
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 hdfs_site = config['configurations']['hdfs-site']
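
A note on the SPARK hunk above: the thrift-server block is now gated on component presence (has_spark_thriftserver) together with the spark-thrift-sparkconf config type, rather than back-filling "version" from get_hdp_version('spark-thriftserver') and comparing against 2.3.2.0. A minimal sketch of that gating follows; the config payload is invented, and has_spark_thriftserver, whose real definition lives elsewhere in params.py and is not shown in this hunk, is assumed here to come from the component's host list in clusterHostInfo.

# Minimal sketch of the new gating, runnable outside the Ambari runtime.
# config is an invented example payload; has_spark_thriftserver is assumed
# to be derived from the component's host list (its real definition is not
# part of this hunk).
config = {
  "clusterHostInfo": {
    "spark_thriftserver_hosts": ["c6402.ambari.apache.org"],
  },
  "configurations": {
    "spark-thrift-sparkconf": {"spark.master": "yarn-client"},
    "spark-env": {"spark_thrift_cmd_opts": ""},
  },
}

# Presence of the component decides whether the thrift-server settings apply;
# no hdp-select / compare_versions probing is needed for that decision.
has_spark_thriftserver = len(config["clusterHostInfo"].get("spark_thriftserver_hosts", [])) > 0

spark_thrift_sparkconf = None
spark_thrift_cmd_opts_properties = ''
if has_spark_thriftserver and 'spark-thrift-sparkconf' in config['configurations']:
  spark_thrift_sparkconf = config['configurations']['spark-thrift-sparkconf']
  spark_thrift_cmd_opts_properties = config['configurations']['spark-env']['spark_thrift_cmd_opts']

print(spark_thrift_sparkconf)  # {'spark.master': 'yarn-client'} when a thrift server host is present
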

