ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From d...@apache.org
Subject ambari git commit: AMBARI-8966 Ambari 1.7.0 hive client host also has /etc/hive/conf.server populated (dsen)
Date Fri, 02 Jan 2015 15:25:38 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 1cb1d211f -> 7d299d0c6


AMBARI-8966 Ambari 1.7.0 hive client host also has /etc/hive/conf.server populated (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d299d0c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d299d0c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d299d0c

Branch: refs/heads/trunk
Commit: 7d299d0c69d8125da188a7af2ab4371e03fd725d
Parents: 1cb1d21
Author: Dmytro Sen <dsen@apache.org>
Authored: Fri Jan 2 17:24:51 2015 +0200
Committer: Dmytro Sen <dsen@apache.org>
Committed: Fri Jan 2 17:25:18 2015 +0200

----------------------------------------------------------------------
 .../HIVE/0.12.0.2.0/package/scripts/params.py   |  12 +-
 .../1.3.2/configs/default.hbasedecom.json       |   2 +-
 .../python/stacks/1.3.2/configs/default.json    |   2 +-
 .../1.3.2/configs/default.non_gmetad_host.json  |   2 +-
 .../stacks/1.3.2/configs/default_client.json    |   2 +-
 .../default_update_exclude_file_only.json       |   2 +-
 .../python/stacks/1.3.2/configs/secured.json    |   2 +-
 .../stacks/1.3.2/configs/secured_client.json    |   2 +-
 .../1.3.2/configs/secured_no_jce_name.json      |   2 +-
 .../stacks/2.0.6/HIVE/test_hive_client.py       |  66 ---
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    | 291 +++++-------
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 454 ++++++++-----------
 .../default_update_exclude_file_only.json       |   2 +-
 .../python/stacks/2.0.6/configs/flume_22.json   |   2 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      | 238 +++++-----
 .../test/python/stacks/2.1/configs/default.json |   2 +-
 .../test/python/stacks/2.1/configs/secured.json |   2 +-
 17 files changed, 442 insertions(+), 643 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
index 41785c6..8eae687 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -27,6 +27,9 @@ import os
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+# node hostname
+hostname = config["hostname"]
+
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
@@ -122,7 +125,8 @@ downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
 prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
 
 #common
-hive_metastore_host = config['clusterHostInfo']['hive_metastore_host'][0]
+hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
+hive_metastore_host = hive_metastore_hosts[0]
 hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
 hive_var_lib = '/var/lib/hive'
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
@@ -150,7 +154,10 @@ hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
 hive_pid_dir = status_params.hive_pid_dir
 hive_pid = status_params.hive_pid
 #Default conf dir for client
-hive_conf_dirs_list = [hive_server_conf_dir, hive_client_conf_dir]
+hive_conf_dirs_list = [hive_client_conf_dir]
+
+if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
+  hive_conf_dirs_list.append(hive_server_conf_dir)
 
 if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
   hive_config_dir = hive_server_conf_dir
@@ -231,7 +238,6 @@ hive_hdfs_user_dir = format("/user/{hive_user}")
 hive_hdfs_user_mode = 0700
 hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
 #for create_hdfs_directory
-hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
index 9ba4073..3386760 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
@@ -485,7 +485,7 @@
             "c6401.ambari.apache.org", 
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "all_ping_ports": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
index c2dc7d6..eaf44e1 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
@@ -552,7 +552,7 @@
             "c6401.ambari.apache.org", 
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "all_ping_ports": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
index fa8d183..2fdaa5b 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
@@ -533,7 +533,7 @@
             "c6401.ambari.apache.org", 
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "all_ping_ports": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/1.3.2/configs/default_client.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default_client.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default_client.json
index 77a06f2..a9b33d2 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default_client.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default_client.json
@@ -536,7 +536,7 @@
             "c6401.ambari.apache.org", 
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "all_ping_ports": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/1.3.2/configs/default_update_exclude_file_only.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default_update_exclude_file_only.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default_update_exclude_file_only.json
index e4fac9f..839ef23 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default_update_exclude_file_only.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default_update_exclude_file_only.json
@@ -551,7 +551,7 @@
             "c6401.ambari.apache.org", 
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "all_ping_ports": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json b/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
index 1278350..cf07b9d 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
@@ -721,7 +721,7 @@
             "c6401.ambari.apache.org", 
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "all_ping_ports": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/1.3.2/configs/secured_client.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/secured_client.json b/ambari-server/src/test/python/stacks/1.3.2/configs/secured_client.json
index b41af13..d57d6b7 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/secured_client.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/secured_client.json
@@ -714,7 +714,7 @@
             "c6401.ambari.apache.org", 
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "all_ping_ports": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json b/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json
index 124db7c..c5d508c 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json
@@ -582,7 +582,7 @@
             "c6401.ambari.apache.org", 
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "all_ping_ports": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
index 0e2fc9e..3c3bf7e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
@@ -35,39 +35,6 @@ class TestHiveClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hive',
         mode = 0755
     )
-    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf.server',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
         owner = 'hive',
         group = 'hadoop',
@@ -133,39 +100,6 @@ class TestHiveClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hive',
         mode = 0755
     )
-    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf.server',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
         owner = 'hive',
         group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index fdfb301..6eef93f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -98,7 +98,6 @@ class TestHiveMetastore(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
@@ -137,79 +136,49 @@ class TestHiveMetastore(RMFTestCase):
 
   def assert_configure_default(self):
     self.assertResourceCalled('Directory', '/etc/hive',
-        mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf.server',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
+                              group = 'hadoop',
+                              conf_dir = '/etc/hive/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner = 'hive',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              content = 'log4jproperties\nline2',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              content = 'log4jproperties\nline2',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
                               conf_dir = '/etc/hive/conf.server',
                               mode = 0644,
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
@@ -218,112 +187,85 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              sudo = True
+                              sudo = True,
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
-        environment = {'no_proxy': 'c6401.ambari.apache.org'},
-        not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
-    )
+                              environment = {'no_proxy': u'c6401.ambari.apache.org'},
+                              not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
+                              )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
-        content = StaticFile('startMetastore.sh'),
-        mode = 0755,
-    )
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/var/run/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/log/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hive',
-        mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf.server',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
+                              group = 'hadoop',
+                              conf_dir = '/etc/hive/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner = 'hive',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              content = 'log4jproperties\nline2',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              content = 'log4jproperties\nline2',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
                               conf_dir = '/etc/hive/conf.server',
                               mode = 0644,
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
@@ -332,33 +274,36 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              sudo = True
+                              sudo = True,
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
-        environment = {'no_proxy': 'c6401.ambari.apache.org'},
-        not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
-    )
+                              environment = {'no_proxy': u'c6401.ambari.apache.org'},
+                              not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
+                              )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
-        content = StaticFile('startMetastore.sh'),
-        mode = 0755,
-    )
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/var/run/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/log/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index da316b7..f1a65e3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -171,163 +171,130 @@ class TestHiveServer(RMFTestCase):
 
   def assert_configure_default(self):
     self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
-                              action = ['create_delayed'],
-                              mode = 0755,
-                              owner = 'tez',
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              kinit_path_local = "/usr/bin/kinit"
-    )
-
-    self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
-                              action = ['create_delayed'],
+                              kinit_path_local = '/usr/bin/kinit',
                               mode = 0755,
                               owner = 'tez',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'tez',
                               bin_dir = '/usr/bin',
-                              kinit_path_local = "/usr/bin/kinit"
-    )
+                              action = ['create_delayed'],
+                              )
     self.assertResourceCalled('HdfsDirectory', None,
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
                               bin_dir = '/usr/bin',
-                              action = ['create']
-    )
-
+                              )
     self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/tez*.jar',
-                              mode=0755,
-                              owner='tez',
-                              dest_dir='/apps/tez/',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs',
-                              dest_file=None
-    )
-
+                              hadoop_bin_dir = '/usr/bin',
+                              hdfs_user = 'hdfs',
+                              owner = 'tez',
+                              dest_file = None,
+                              kinnit_if_needed = '',
+                              dest_dir = '/apps/tez/',
+                              hadoop_conf_dir = '/etc/hadoop/conf',
+                              mode = 0755,
+                              )
     self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/lib/*.jar',
-                              mode=0755,
-                              owner='tez',
-                              dest_dir='/apps/tez/lib/',
-                              kinnit_if_needed='',
-                              hadoop_bin_dir='/usr/bin',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
+                              hadoop_conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              owner = 'tez',
+                              kinnit_if_needed = '',
+                              dest_dir = '/apps/tez/lib/',
+                              hadoop_bin_dir = '/usr/bin',
+                              mode = 0755,
+                              )
     self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
-        security_enabled = False,
-        keytab = UnknownConfigurationMock(),
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        mode = 0777,
-        owner = 'hive',
-        bin_dir = '/usr/bin',
-        action = ['create_delayed'],
-    )
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hive',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
     self.assertResourceCalled('HdfsDirectory', '/user/hive',
-        security_enabled = False,
-        keytab = UnknownConfigurationMock(),
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        mode = 0700,
-        owner = 'hive',
-        bin_dir = '/usr/bin',
-        action = ['create_delayed'],
-    )
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0700,
+                              owner = 'hive',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
     self.assertResourceCalled('HdfsDirectory', None,
-        security_enabled = False,
-        keytab = UnknownConfigurationMock(),
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        bin_dir = '/usr/bin',
-        action = ['create'],
-    )
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              )
     self.assertResourceCalled('Directory', '/etc/hive',
-        mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf.server',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
+                              group = 'hadoop',
+                              conf_dir = '/etc/hive/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner = 'hive',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              content = 'log4jproperties\nline2',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              content = 'log4jproperties\nline2',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
                               conf_dir = '/etc/hive/conf.server',
                               mode = 0644,
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
@@ -336,143 +303,117 @@ class TestHiveServer(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              sudo = True
+                              sudo = True,
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
-        environment = {'no_proxy': 'c6401.ambari.apache.org'},
-        not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
-    )
+                              environment = {'no_proxy': u'c6401.ambari.apache.org'},
+                              not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
+                              )
     self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
-        content = Template('startHiveserver2.sh.j2'),
-        mode = 0755,
-    )
+                              content = Template('startHiveserver2.sh.j2'),
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/var/run/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/log/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
+
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
-        security_enabled = True,
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        bin_dir = '/usr/bin',
-        mode = 0777,
-        owner = 'hive',
-        action = ['create_delayed'],
-    )
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hive',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
     self.assertResourceCalled('HdfsDirectory', '/user/hive',
-        security_enabled = True,
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        mode = 0700,
-        bin_dir = '/usr/bin',
-        owner = 'hive',
-        action = ['create_delayed'],
-    )
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0700,
+                              owner = 'hive',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
     self.assertResourceCalled('HdfsDirectory', None,
-        security_enabled = True,
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
-        bin_dir = '/usr/bin',
-        kinit_path_local = '/usr/bin/kinit',
-        action = ['create'],
-    )
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              )
     self.assertResourceCalled('Directory', '/etc/hive',
-        mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf.server',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
+                              group = 'hadoop',
+                              conf_dir = '/etc/hive/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner = 'hive',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              content = 'log4jproperties\nline2',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
-        content = 'log4jproperties\nline2',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0644,
-    )
+                              content = 'log4jproperties\nline2',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
                               conf_dir = '/etc/hive/conf.server',
                               mode = 0644,
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
@@ -481,30 +422,33 @@ class TestHiveServer(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              sudo = True
+                              sudo = True,
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
-        environment = {'no_proxy': 'c6401.ambari.apache.org'},
-        not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
-    )
+                              environment = {'no_proxy': u'c6401.ambari.apache.org'},
+                              not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
+                              )
     self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
-        content = Template('startHiveserver2.sh.j2'),
-        mode = 0755,
-    )
+                              content = Template('startHiveserver2.sh.j2'),
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/var/run/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/log/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hive',
         owner = 'hive',
         group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/2.0.6/configs/default_update_exclude_file_only.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default_update_exclude_file_only.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default_update_exclude_file_only.json
index ee55834..9febfff 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default_update_exclude_file_only.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default_update_exclude_file_only.json
@@ -747,7 +747,7 @@
         "logviewer_server_hosts": [
             "c6402.ambari.apache.org"
         ],
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "hbase_rs_hosts": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/2.0.6/configs/flume_22.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/flume_22.json b/ambari-server/src/test/python/stacks/2.0.6/configs/flume_22.json
index c4f44d8..3a7aa33 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/flume_22.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/flume_22.json
@@ -742,7 +742,7 @@
         "logviewer_server_hosts": [
             "c6402.ambari.apache.org"
         ],
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "hbase_rs_hosts": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index 5cf9c34..7ca6a50 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -138,55 +138,37 @@ class TestHiveMetastore(RMFTestCase):
 
   def assert_configure_default(self):
     self.assertResourceCalled('Directory', '/etc/hive',
-        mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf.server',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
+                              group = 'hadoop',
+                              conf_dir = '/etc/hive/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner = 'hive',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
                               conf_dir = '/etc/hive/conf.server',
                               mode = 0644,
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
@@ -195,91 +177,76 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              sudo = True
+                              sudo = True,
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
-        environment = {'no_proxy': 'c6401.ambari.apache.org'},
-        not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
-    )
+                              environment = {'no_proxy': u'c6401.ambari.apache.org'},
+                              not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
+                              )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
-        content = StaticFile('startMetastore.sh'),
-        mode = 0755,
-    )
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -initSchema -dbType mysql -userName hive -passWord aaa',
-        not_if = 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -info -dbType mysql -userName hive -passWord aaa',
-    )
+                              not_if = 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -info -dbType mysql -userName hive -passWord aaa',
+                              )
     self.assertResourceCalled('Directory', '/var/run/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/log/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hive',
-        mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf.server',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
-        owner = 'hive',
-        group = 'hadoop',
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-        group = 'hadoop',
-        conf_dir = '/etc/hive/conf',
-        mode = 0644,
-        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
-        owner = 'hive',
-        configurations = self.getConfig()['configurations']['mapred-site'],
-    )
+                              group = 'hadoop',
+                              conf_dir = '/etc/hive/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner = 'hive',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-        owner = 'hive',
-        group = 'hadoop',
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
                               conf_dir = '/etc/hive/conf.server',
                               mode = 0644,
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
@@ -288,36 +255,39 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
-        path = ['/bin', '/usr/bin/'],
-        sudo = True
-    )
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
+                              path = ['/bin', '/usr/bin/'],
+                              sudo = True,
+                              )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
-        environment = {'no_proxy': 'c6401.ambari.apache.org'},
-        not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
-    )
+                              environment = {'no_proxy': u'c6401.ambari.apache.org'},
+                              not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
+                              )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
-        content = StaticFile('startMetastore.sh'),
-        mode = 0755,
-    )
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -initSchema -dbType mysql -userName hive -passWord asd',
-        not_if = 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -info -dbType mysql -userName hive -passWord asd',
-    )
+                              not_if = 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -info -dbType mysql -userName hive -passWord asd',
+                              )
     self.assertResourceCalled('Directory', '/var/run/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/log/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-        owner = 'hive',
-        group = 'hadoop',
-        mode = 0755,
-        recursive = True,
-    )
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/2.1/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/default.json b/ambari-server/src/test/python/stacks/2.1/configs/default.json
index f291201..9f22fe8 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/default.json
@@ -819,7 +819,7 @@
         "logviewer_server_hosts": [
             "c6402.ambari.apache.org"
         ],
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "hbase_rs_hosts": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d299d0c/ambari-server/src/test/python/stacks/2.1/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/secured.json b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
index 3eedb77..0ba831d 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
@@ -803,7 +803,7 @@
         "logviewer_server_hosts": [
             "c6402.ambari.apache.org"
         ], 
-        "hive_metastore_hosts": [
+        "hive_metastore_host": [
             "c6402.ambari.apache.org"
         ], 
         "hbase_rs_hosts": [


Mime
View raw message