incubator-ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From maha...@apache.org
Subject [4/4] git commit: AMBARI-3311. Ambari-Client create unit tests for the methods of ClusterModel. (Andrew Onischuk via mahadev)
Date Thu, 26 Sep 2013 03:59:16 GMT
AMBARI-3311. Ambari-Client create unit tests for the methods of ClusterModel. (Andrew Onischuk via mahadev)


Project: http://git-wip-us.apache.org/repos/asf/incubator-ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ambari/commit/a0386357
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ambari/tree/a0386357
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ambari/diff/a0386357

Branch: refs/heads/trunk
Commit: a0386357b7cd1a2b932c7331f336228e298ee42a
Parents: a8864ba
Author: Mahadev Konar <mahadev@apache.org>
Authored: Wed Sep 25 20:59:07 2013 -0700
Committer: Mahadev Konar <mahadev@apache.org>
Committed: Wed Sep 25 20:59:07 2013 -0700

----------------------------------------------------------------------
 ambari-client/pom.xml                           |    2 +-
 .../src/main/python/ambari_client/model/host.py |    2 +-
 .../src/test/python/TestAmbariClient.py         |  101 +-
 .../src/test/python/TestClusterModel.py         |  406 +++
 .../python/json/clustermodel_get_all_hosts.json |  375 +++
 .../json/clustermodel_get_all_services.json     |  120 +
 .../python/json/clustermodel_get_cluster.json   |  103 +
 .../json/clustermodel_get_core_site_config.json |   32 +
 .../json/clustermodel_get_global_config.json    |   64 +
 .../json/clustermodel_get_hdfs_site_config.json |   53 +
 .../test/python/json/clustermodel_get_host.json |  250 ++
 .../clustermodel_get_mapred_site_config.json    |   58 +
 .../python/json/clustermodel_get_service.json   |   29 +
 .../src/test/python/json/error_adding_host.json |    4 -
 .../test/python/json/error_deleting_host.json   |    4 +
 .../src/test/python/json/get_cluster.json       |  103 -
 .../src/test/python/json/get_cluster_host.json  |  250 --
 .../src/test/python/json/get_cluster_hosts.json | 2770 ------------------
 .../test/python/json/get_cluster_services.json  |  120 -
 .../python/json/get_components_from_stack.json  |   41 +
 .../src/test/python/json/get_service.json       |   29 -
 .../src/test/python/utils/HttpClientInvoker.py  |   67 +
 22 files changed, 1609 insertions(+), 3374 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-client/pom.xml b/ambari-client/pom.xml
index 38d61f1..1bd4c84 100755
--- a/ambari-client/pom.xml
+++ b/ambari-client/pom.xml
@@ -78,7 +78,7 @@
                 <argument>unitTests.py</argument>
               </arguments>
               <environmentVariables>
-                <PYTHONPATH>${project.basedir}/../ambari-common/src/test/python:${project.basedir}/src/main/python/ambari_client:$PYTHONPATH</PYTHONPATH>
+                <PYTHONPATH>${project.basedir}/../ambari-common/src/test/python:${project.basedir}/src/main/python/ambari_client:${project.basedir}/src/test/python/utils:$PYTHONPATH</PYTHONPATH>
               </environmentVariables>
               <skip>${skipTests}</skip>
             </configuration>

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/main/python/ambari_client/model/host.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/main/python/ambari_client/model/host.py b/ambari-client/src/main/python/ambari_client/model/host.py
index c5e94cb..ba3c08b 100755
--- a/ambari-client/src/main/python/ambari_client/model/host.py
+++ b/ambari-client/src/main/python/ambari_client/model/host.py
@@ -195,7 +195,7 @@ class HostModel(BaseModel):
   RW_ATTR = ('host_name', 'ip', 'rack_info')
   REF_ATTR = ('cluster_name',)
   
-  def __init__(self, resource_root, host_name, ip=None , rack_info=None):
+  def __init__(self, resource_root, host_name, ip=None , rack_info='/default-rack'):
     utils.retain_self_helper(BaseModel, **locals())
 
   def __str__(self):

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/TestAmbariClient.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/TestAmbariClient.py b/ambari-client/src/test/python/TestAmbariClient.py
index a18aa0d..9bec45e 100755
--- a/ambari-client/src/test/python/TestAmbariClient.py
+++ b/ambari-client/src/test/python/TestAmbariClient.py
@@ -22,7 +22,7 @@ limitations under the License.
 from mock.mock import MagicMock, patch
 from ambari_client.ambari_api import  AmbariClient 
 from ambari_client.core.errors import BadRequest
-
+from HttpClientInvoker import HttpClientInvoker
 import unittest
 
 class TestAmbariClient(unittest.TestCase):
@@ -81,7 +81,7 @@ class TestAmbariClient(unittest.TestCase):
     self.assertEqual(all_clusters.to_json_dict(), expected_output, "to_json_dict should convert ModelList")
     
   @patch("ambari_client.core.http_client.HttpClient")  
-  def test_get_hosts_clusters_valid(self , http_client):
+  def test_get_all_hosts(self , http_client):
     """
     Get all hosts.
     This testcase checks if get_all_hosts returns a list of ModelList.
@@ -135,7 +135,7 @@ class TestAmbariClient(unittest.TestCase):
     mocked_code = "200" 
     mocked_content = "text/plain"
     
-    linestring = open('json/get_cluster.json', 'r').read()
+    linestring = open('json/clustermodel_get_cluster.json', 'r').read()
     mocked_response = linestring
     expected_dict_output = {'cluster_name': u'test1', 'version': u'HDP-1.2.1'}
     
@@ -145,55 +145,6 @@ class TestAmbariClient(unittest.TestCase):
     
     self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
     self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
-
-
-
-  @patch("ambari_client.core.http_client.HttpClient")  
-  def test_get_cluster_services_valid(self , http_client):
-    """
-    Get all services of a cluster.
-    This testcase checks if get_all_services returns a list of ModelList.
-    """
-    http_client_mock = MagicMock()
-    http_client.returned_obj = http_client_mock
-    mocked_code = "200" 
-    mocked_content = "text/plain"
-    
-    expected_dict_output = {'cluster_name': u'test1', 'version': u'HDP-1.2.1'}
-    
-    http_client_mock.invoke.side_effect = http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    cluster = client.get_cluster('test1')
-    serviceList = cluster.get_all_services()
-    
-    self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
-    self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
-    self.assertEqual(len(serviceList), 3, "There should be a 3 services from the response")
- 
-  @patch("ambari_client.core.http_client.HttpClient")  
-  def test_get_cluster_service_valid(self , http_client):
-    """
-    Get the service of a cluster
-    This testcase checks if get_service returns a list of ServiceModel.
-    """
-    http_client_mock = MagicMock()
-    http_client.returned_obj = http_client_mock
-    mocked_code = "200" 
-    mocked_content = "text/plain"
-    
-    expected_dict_output = {'cluster_name': u'test1', 'version': u'HDP-1.2.1'}
-    
-    http_client_mock.invoke.side_effect = http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    cluster = client.get_cluster('test1')
-    serviceList = cluster.get_all_services()
-    ganglia = cluster.get_service("GANGLIA")  
-
-    self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
-    self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
-    self.assertEqual(len(serviceList), 3, "There should be a 3 services from the response")
-    self.assertEqual(str(ganglia.state), "STARTED", "The ganglia service state should be fetched as STARTED")
-    self.assertEqual(ganglia.clusterRef.cluster_name, cluster.cluster_name, "The clusterRef value for  service  should be fetched ")
     
   @patch("ambari_client.core.http_client.HttpClient")  
   def test_exceptions(self , http_client):
@@ -205,57 +156,15 @@ class TestAmbariClient(unittest.TestCase):
     mocked_code = "200" 
     mocked_content = "text/plain"
     
-    http_client_mock.invoke.side_effect = http_client_invoke_side_effects
+    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
     client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
     cluster = client.get_cluster('test1')
     
     try:
       cluster.delete_host('deleted_nonexistant_cluster')
+      print http_client_mock.invoke.call_args_list
       self.fail('Exception should have been thrown!')
     except BadRequest, ex:
       self.assertEquals(str(ex), 'exception: 400. Attempted to add unknown hosts to a cluster.  These hosts have not been registered with the server: dev05')
     except Exception, ex:
       self.fail('Wrong exception thrown!')
-
-  
-
-def http_client_invoke_side_effects(*args, **kwargs):
-    print locals()
-    mocked_code = "200" 
-    mocked_content = "text/plain"
-    if args[1] == "//clusters/test1":
-        mocked_response = open('json/get_cluster.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//hosts":
-        mocked_response = open('json/get_all_hosts.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test6/hosts/r01wn01":
-        mocked_response = open('json/get_cluster_host.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test6/hosts?fields=*":
-        mocked_response = open('json/get_cluster_hosts.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test6/services/GANGLIA":
-        mocked_response = open('json/get_cluster_service.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test1/services?fields=*":
-        mocked_response = open('json/get_cluster_services.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test6/hosts/r01wn01/host_components/NAMENODE":
-        mocked_response = open('json/get_host_component.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test6/hosts/r01wn01/host_components?ServiceComponentInfo":
-        mocked_response = open('json/get_host_components.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test6/services/GANGLIA/components/GANGLIA_MONITOR":
-        mocked_response = open('json/get_service_component.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test6/services/GANGLIA/components?fields=*":
-        mocked_response = open('json/get_service_components.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test1/services/GANGLIA":
-        mocked_response = open('json/get_service.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content
-    elif args[1] == "//clusters/test1/hosts/deleted_nonexistant_cluster":
-        mocked_response = open('json/error_adding_host.json', 'r').read()
-        return mocked_response, mocked_code , mocked_content

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/TestClusterModel.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/TestClusterModel.py b/ambari-client/src/test/python/TestClusterModel.py
new file mode 100644
index 0000000..3eeef1e
--- /dev/null
+++ b/ambari-client/src/test/python/TestClusterModel.py
@@ -0,0 +1,406 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+
+from mock.mock import MagicMock, patch
+from ambari_client.ambari_api import  AmbariClient
+from HttpClientInvoker import HttpClientInvoker
+from ambari_client.model.host import HostModel
+
+import unittest
+
+class TestAmbariClient(unittest.TestCase):
+  
+  def create_cluster(self, http_client_mock = MagicMock()):    
+    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
+    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
+    return client.get_cluster('test1')
+   
+  def test_get_service(self):
+    """
+    Get the service of a cluster
+    This testcase checks if get_service returns a list of ServiceModel.
+    """   
+    expected_dict_output = {'cluster_name': 'test1', 'version': 'HDP-1.2.1'}
+    
+    cluster = self.create_cluster()
+    serviceList = cluster.get_all_services()
+    ganglia = cluster.get_service("GANGLIA")  
+
+    self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
+    self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
+    self.assertEqual(len(serviceList), 3, "There should be a 3 services from the response")
+    self.assertEqual(str(ganglia.state), "STARTED", "The ganglia service state should be fetched as STARTED")
+    self.assertEqual(ganglia.clusterRef.cluster_name, cluster.cluster_name, "The clusterRef value for  service  should be fetched ")
+      
+  def test_get_all_services(self):
+    """
+    Get all services of a cluster.
+    This testcase checks if get_all_services returns a list of ModelList.
+    """
+    expected_dict_output = {'cluster_name': 'test1', 'version': 'HDP-1.2.1'}
+    
+    cluster = self.create_cluster()
+    serviceList = cluster.get_all_services()
+    
+    self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
+    self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
+    self.assertEqual(len(serviceList), 3, "There should be a 3 services from the response")
+
+  def test_get_all_hosts(self):
+    """
+    Get all cluster hosts
+    This testcase checks if get_all_services returns a list of ModelList.
+    """
+    expected_dict_output = {'items': [{'ip': '10.0.2.15', 'host_name': 'dev05.hortonworks.com', 'rack_info': '/default-rack'}, {'ip': '10.0.2.15', 'host_name': 'dev06.hortonworks.com', 'rack_info': '/default-rack'}]}
+
+    cluster = self.create_cluster()
+    hostlist = cluster.get_all_hosts()
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    self.assertEqual(hostlist.to_json_dict(), expected_dict_output)
+    self.assertEqual(hostlist[1].host_name, 'dev06.hortonworks.com')
+    self.assertEqual(len(hostlist), 2)  
+    
+  def test_get_host(self):
+    """
+    Get cluster host
+    This testcase checks if get_host returns correct HostModel
+    """
+    expected_dict_output = {'ip': '10.104.44.95', 'host_name': 'myhost', 'rack_info': '/default-rack'}
+    
+    cluster = self.create_cluster()
+    host = cluster.get_host('myhost')
+    
+    self.assertEqual(host.clusterRef.cluster_name, "test1")
+    self.assertEqual(host.to_json_dict(), expected_dict_output)
+    self.assertEqual(host.host_state, "HEALTHY")
+    self.assertEqual(host.public_host_name, "myhost")
+     
+  def test_get_global_config(self):
+    """
+    Get global config
+    This testcase checks if get_host returns correct HostModel
+    """
+    expected_dict_output = {'tag': 'version1', 'type': 'global'}
+    expected_properties = {'dfs_namenode_name_dir': '/hadoop/hdfs/namenode', 'security_enabled': 'false', 'proxyuser_group': 'users', 'hdfs_log_dir_prefix': '/var/log/hadoop', 'dfs_datanode_data_dir': '/hadoop/hdfs/data', 'namenode_formatted_mark_dir': '/var/run/hadoop/hdfs/namenode/formatted/', 'rrdcached_base_dir': '/var/lib/ganglia/rrds', 'user_group': 'hadoop', 'dfs_namenode_checkpoint_dir': '/hadoop/hdfs/namesecondary', 'dfs_namenode_checkpoint_period': '21600', 'hive_user': 'hive', 'fs_checkpoint_size': '0.5', 'hbase_conf_dir': '/etc/hbase', 'datanode_du_reserved': '1', 'dfs_datanode_http_address': '50075', 'namenode_heapsize': '1024m', 'dfs_webhdfs_enabled': 'true', 'oozie_user': 'oozie', 'hcat_conf_dir': '', 'hadoop_conf_dir': '/etc/hadoop/conf', 'dfs_replication': '3', 'namenode_opt_maxnewsize': '640m', 'apache_artifacts_download_url': '', 'dfs_datanode_address': '50010', 'dfs_exclude': 'dfs.exclude', 'yarn_user': 'yarn', 'gpl_artifacts_download_url': '', 'zk_user': 'zookeeper', 'smokeuser': 'ambari-qa', 'dtnode_heapsize': '1024m', 'gmond_user': 'nobody', 'dfs_datanode_failed_volume_tolerated': '0', 'java64_home': '/usr/jdk/jdk1.6.0_31', 'run_dir': '/var/run/hadoop', 'ganglia_runtime_dir': '/var/run/ganglia/hdp', 'dfs_datanode_data_dir_perm': '750', 'hdfs_enable_shortcircuit_read': 'true', 'hdfs_user': 'hdfs', 'hbase_user': 'hbase', 'webhcat_user': 'hcat', 'gmetad_user': 'nobody', 'dfs_block_local_path_access_user': 'hbase', 'namenode_opt_newsize': '200m', 'mapred_user': 'mapred', 'nagios_group': 'nagios', 'hcat_user': 'hcat', 'hadoop_heapsize': '1024', 'hadoop_pid_dir_prefix': '/var/run/hadoop', 'nagios_user': 'nagios'}
+    
+    cluster = self.create_cluster()
+    global_config = cluster.get_global_config()
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    self.assertEqual(global_config.properties, expected_properties)
+    self.assertEqual(global_config.to_json_dict(), expected_dict_output)
+    
+  def test_get_core_site_config(self):
+    """
+    Get core-site config
+    """
+    expected_dict_output = {'tag': 'version1', 'type': 'core-site'}
+    expected_properties = {'io.serializations': 'org.apache.hadoop.io.serializer.WritableSerialization', 'fs.checkpoint.size': '0.5', 'fs.trash.interval': '360', 'hadoop.security.authentication': 'simple', 'io.compression.codecs': 'org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec', 'mapreduce.jobtracker.webinterface.trusted': 'false', 'hadoop.security.authorization': 'false', 'fs.checkpoint.edits.dir': '/hadoop/hdfs/namesecondary', 'ipc.client.connection.maxidletime': '30000', 'ipc.client.connect.max.retries': '50', 'hadoop.security.auth_to_local': '\n        RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n        RULE:[2:$1@$0](jhs@.*)s/.*/mapred/\n        RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n        RULE:[2:$1@$0](hm@.*)s/.*/hbase/\n        RULE:[2:$1@$0](rs@.*)s/.*/hbase/\n        DEFAULT\n    ', 'io.file.buffer.size': '131072', 'dfs.namenode.checkpoint.dir': '/hadoop/hdfs/namesecondary', 'ipc.client.idlethreshold': '8000', 'dfs.namenode.checkpoint.edits.dir': '${dfs.namenode.checkpoint.dir}', 'fs.defaultFS': 'hdfs://dev05.hortonworks.com:8020', 'dfs.namenode.checkpoint.period': '21600'}
+        
+    cluster = self.create_cluster()
+    global_config = cluster.get_core_site_config()
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    self.assertEqual(global_config.properties, expected_properties)
+    self.assertEqual(global_config.to_json_dict(), expected_dict_output)
+    
+  def test_get_hdfs_site_config(self):
+    """
+    Get hdfs config
+    """
+    expected_dict_output = {'tag': 'version1', 'type': 'hdfs-site'}
+    expected_properties = {'dfs.namenode.avoid.write.stale.datanode': 'true', 'dfs.webhdfs.enabled': 'true', 'dfs.block.access.token.enable': 'true', 'dfs.datanode.address': '0.0.0.0:50010', 'dfs.cluster.administrators': ' hdfs', 'dfs.datanode.balance.bandwidthPerSec': '6250000', 'dfs.namenode.safemode.threshold-pct': '1.0f', 'dfs.permissions.enabled': 'true', 'dfs.client.read.shortcircuit': 'true', 'dfs.journalnode.edits.dir': '/grid/0/hdfs/journal', 'dfs.blocksize': '134217728', 'dfs.datanode.max.transfer.threads': '1024', 'dfs.datanode.du.reserved': '1', 'dfs.replication': '3', 'dfs.namenode.handler.count': '100', 'fs.permissions.umask-mode': '022', 'dfs.datanode.http.address': '0.0.0.0:50075', 'dfs.datanode.ipc.address': '0.0.0.0:8010', 'dfs.datanode.data.dir': '/hadoop/hdfs/data', 'dfs.namenode.http-address': 'dev05.hortonworks.com:50070', 'dfs.blockreport.initialDelay': '120', 'dfs.datanode.failed.volumes.tolerated': '0', 'dfs.namenode.accesstime.precision': '0', 'dfs.block.local-path-access.user': 'hbase', 'dfs.https.namenode.https-address': 'dev05.hortonworks.com:50470', 'dfs.namenode.secondary.http-address': 'dev05.hortonworks.com:50090', 'dfs.namenode.stale.datanode.interval': '30000', 'dfs.heartbeat.interval': '3', 'dfs.client.read.shortcircuit.streams.cache.size': '4096', 'dfs.permissions.superusergroup': 'hdfs', 'dfs.journalnode.http-address': '0.0.0.0:8480', 'dfs.domain.socket.path': '/var/lib/hadoop-hdfs/dn_socket', 'dfs.namenode.avoid.read.stale.datanode': 'true', 'dfs.hosts.exclude': '/etc/hadoop/conf/dfs.exclude', 'dfs.datanode.data.dir.perm': '750', 'dfs.namenode.write.stale.datanode.ratio': '1.0f', 'dfs.replication.max': '50', 'dfs.namenode.name.dir': '/hadoop/hdfs/namenode'}
+        
+    cluster = self.create_cluster()
+    global_config = cluster.get_hdfs_site_config()
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    self.assertEqual(global_config.properties, expected_properties)
+    self.assertEqual(global_config.to_json_dict(), expected_dict_output)
+    
+  def test_get_mapred_site_config(self):
+    """
+    Get mapred config
+    """
+    expected_dict_output = {'tag': 'version1', 'type': 'mapred-site'}
+    expected_properties = {'mapreduce.jobhistory.address': 'dev05.hortonworks.com:10020', 'mapreduce.reduce.input.buffer.percent': '0.0', 'mapred.jobtracker.maxtasks.per.job': '-1', 'mapreduce.framework.name': 'yarn', 'mapreduce.map.speculative': 'false', 'mapreduce.tasktracker.healthchecker.script.path': 'file:////mapred/jobstatus', 'mapreduce.reduce.shuffle.merge.percent': '0.66', 'mapred.userlog.retain.hours': '24', 'yarn.app.mapreduce.am.resource.mb': '1024', 'mapreduce.reduce.shuffle.parallelcopies': '30', 'mapreduce.map.java.opts': '-Xmx320m', 'mapreduce.task.io.sort.factor': '100', 'mapreduce.application.classpath': '$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*', 'yarn.app.mapreduce.am.command-opts': '-Xmx756m', 'mapreduce.job.reduce.slowstart.completedmaps': '0.05', 'mapreduce.output.fileoutputformat.compress.type': 'BLOCK', 'mapreduce.reduce.speculative': 'false', 'mapreduce.reduce.java.opts': '-Xmx756m', 'mapreduce.am.max-attempts': '2', 'yarn.app.mapreduce.am.admin-command-opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN', 'mapreduce.jobtracker.system.dir': '/mapred/system', 'mapreduce.map.sort.spill.percent': '0.1', 'mapreduce.task.timeout': '600000', 'mapreduce.map.memory.mb': '1536', 'mapreduce.reduce.log.level': 'INFO', 'mapreduce.jobhistory.intermediate-done-dir': '/mr-history/tmp', 'mapreduce.reduce.memory.mb': '2048', 'mapreduce.tasktracker.map.tasks.maximum': '4', 'yarn.app.mapreduce.am.log.level': 'INFO', 'mapreduce.map.log.level': 'INFO', 'mapreduce.shuffle.port': '13562', 'mapred.jobtracker.taskScheduler': 'org.apache.hadoop.mapred.CapacityTaskScheduler', 'mapreduce.admin.user.env': 'LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/`$JAVA_HOME/bin/java -d32 -version &amp;&gt; /dev/null;if [ $? -eq 0 ]; then echo Linux-i386-32; else echo Linux-amd64-64;fi`', 'mapreduce.jobhistory.webapp.address': 'dev05.hortonworks.com:19888', 'mapred.hosts.exclude': '/etc/hadoop/conf/mapred.exclude', 'mapreduce.reduce.shuffle.input.buffer.percent': '0.7', 'yarn.app.mapreduce.am.staging-dir': '/user', 'mapred.hosts': '/etc/hadoop/conf/mapred.include', 'mapreduce.jobhistory.done-dir': '/mr-history/done', 'mapreduce.admin.reduce.child.java.opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN', 'mapreduce.task.io.sort.mb': '200', 'mapred.task.tracker.task-controller': 'org.apache.hadoop.mapred.DefaultTaskController', 'mapreduce.admin.map.child.java.opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN'}
+        
+    cluster = self.create_cluster()
+    global_config = cluster.get_mapred_site_config()
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    self.assertEqual(global_config.properties, expected_properties)
+    self.assertEqual(global_config.to_json_dict(), expected_dict_output)
+    
+  def test_update_global_config(self):
+    """
+    Update global config
+    """
+    http_client_mock = MagicMock()
+    
+    expected_properties = {'dfs_namenode_name_dir': 'abc', 'security_enabled': 'false', 'proxyuser_group': 'users', 'hdfs_log_dir_prefix': '/var/log/hadoop', 'dfs_datanode_data_dir': '/hadoop/hdfs/data', 'namenode_formatted_mark_dir': '/var/run/hadoop/hdfs/namenode/formatted/', 'rrdcached_base_dir': '/var/lib/ganglia/rrds', 'user_group': 'hadoop', 'dfs_namenode_checkpoint_dir': '/hadoop/hdfs/namesecondary', 'dfs_namenode_checkpoint_period': '21600', 'hive_user': 'hive', 'fs_checkpoint_size': '0.5', 'hbase_conf_dir': '/etc/hbase', 'datanode_du_reserved': '1', 'dfs_datanode_http_address': '50075', 'namenode_heapsize': '1024m', 'dfs_webhdfs_enabled': 'true', 'oozie_user': 'oozie', 'hcat_conf_dir': '', 'hadoop_conf_dir': '/etc/hadoop/conf', 'dfs_replication': '3', 'namenode_opt_maxnewsize': '640m', 'apache_artifacts_download_url': '', 'dfs_datanode_address': '50010', 'dfs_exclude': 'dfs.exclude', 'yarn_user': 'yarn', 'gpl_artifacts_download_url': '', 'zk_user': 'zookeeper', 'smokeuser': 'ambari-qa', 'dtnode_heapsize': '1024m', 'gmond_user': 'nobody', 'dfs_datanode_failed_volume_tolerated': '0', 'java64_home': '/usr/jdk/jdk1.6.0_31', 'run_dir': '/var/run/hadoop', 'ganglia_runtime_dir': '/var/run/ganglia/hdp', 'dfs_datanode_data_dir_perm': '750', 'hdfs_enable_shortcircuit_read': 'true', 'hdfs_user': 'hdfs', 'hbase_user': 'hbase', 'webhcat_user': 'hcat', 'gmetad_user': 'nobody', 'dfs_block_local_path_access_user': 'hbase', 'namenode_opt_newsize': '200m', 'mapred_user': 'mapred', 'nagios_group': 'nagios', 'hcat_user': 'hcat', 'hadoop_heapsize': '1024', 'hadoop_pid_dir_prefix': '/var/run/hadoop', 'nagios_user': 'nagios'}
+    expected_post_path = '//clusters/test1/configurations'
+    expected_post_request = {'Clusters': {'desired_configs': {'tag': 'version1', 'type': 'global', 'properties':expected_properties}}}   
+    expected_get_path = '//clusters/test1/configurations?type=global&tag=version1'
+    expected_get_request = None
+        
+    cluster = self.create_cluster(http_client_mock)
+    existant_global_config = cluster.get_global_config()
+    existant_global_config.properties['dfs_namenode_name_dir'] = 'abc'
+    cluster.update_global_config(existant_global_config)
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_any_call('POST', expected_post_path, headers=None, payload=expected_post_request)
+    http_client_mock.invoke.assert_any_call('GET', expected_get_path, headers=None, payload=expected_get_request)
+ 
+  def test_update_core_site_config(self):
+    """
+    Update core-site config
+    """
+    http_client_mock = MagicMock()
+    
+    expected_properties = {'io.serializations': 'abc', 'fs.checkpoint.size': '0.5', 'fs.trash.interval': '360', 'hadoop.security.authentication': 'simple', 'io.compression.codecs': 'org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec', 'mapreduce.jobtracker.webinterface.trusted': 'false', 'hadoop.security.authorization': 'false', 'fs.checkpoint.edits.dir': '/hadoop/hdfs/namesecondary', 'ipc.client.connection.maxidletime': '30000', 'ipc.client.connect.max.retries': '50', 'hadoop.security.auth_to_local': '\n        RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n        RULE:[2:$1@$0](jhs@.*)s/.*/mapred/\n        RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n        RULE:[2:$1@$0](hm@.*)s/.*/hbase/\n        RULE:[2:$1@$0](rs@.*)s/.*/hbase/\n        DEFAULT\n    ', 'io.file.buffer.size': '131072', 'dfs.namenode.checkpoint.dir': '/hadoop/hdfs/namesecondary', 'ipc.client.idlethreshold': '8000', 'dfs.namenode.checkpoint.edits.dir': '${dfs.namenode.checkpoint.dir}', 'fs.defaultFS': 'hdfs://dev05.hortonworks.com:8020', 'dfs.namenode.checkpoint.period': '21600'}
+    expected_post_path = '//clusters/test1/configurations'
+    expected_post_request = {'Clusters': {'desired_configs': {'tag': 'version1', 'type': 'core-site', 'properties':expected_properties}}}   
+    expected_get_path = '//clusters/test1/configurations?type=core-site&tag=version1'
+    expected_get_request = None
+        
+    cluster = self.create_cluster(http_client_mock)
+    existant_global_config = cluster.get_core_site_config()
+    existant_global_config.properties['io.serializations'] = 'abc'
+    cluster.update_core_site_config(existant_global_config)
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_any_call('POST', expected_post_path, headers=None, payload=expected_post_request)
+    http_client_mock.invoke.assert_any_call('GET', expected_get_path, headers=None, payload=expected_get_request)
+       
+  def test_update_hdfs_site_config(self):
+    """
+    Update hdfs-site config
+    """
+    http_client_mock = MagicMock()
+    
+    expected_properties = {'dfs.namenode.avoid.write.stale.datanode': 'abc', 'dfs.webhdfs.enabled': 'true', 'dfs.block.access.token.enable': 'true', 'dfs.datanode.address': '0.0.0.0:50010', 'dfs.cluster.administrators': ' hdfs', 'dfs.datanode.balance.bandwidthPerSec': '6250000', 'dfs.namenode.safemode.threshold-pct': '1.0f', 'dfs.permissions.enabled': 'true', 'dfs.client.read.shortcircuit': 'true', 'dfs.journalnode.edits.dir': '/grid/0/hdfs/journal', 'dfs.blocksize': '134217728', 'dfs.datanode.max.transfer.threads': '1024', 'dfs.datanode.du.reserved': '1', 'dfs.replication': '3', 'dfs.namenode.handler.count': '100', 'fs.permissions.umask-mode': '022', 'dfs.datanode.http.address': '0.0.0.0:50075', 'dfs.datanode.ipc.address': '0.0.0.0:8010', 'dfs.datanode.data.dir': '/hadoop/hdfs/data', 'dfs.namenode.http-address': 'dev05.hortonworks.com:50070', 'dfs.blockreport.initialDelay': '120', 'dfs.datanode.failed.volumes.tolerated': '0', 'dfs.namenode.accesstime.precision': '0', 'dfs.block.local-path-access.user': 'hbase', 'dfs.https.namenode.https-address': 'dev05.hortonworks.com:50470', 'dfs.namenode.secondary.http-address': 'dev05.hortonworks.com:50090', 'dfs.namenode.stale.datanode.interval': '30000', 'dfs.heartbeat.interval': '3', 'dfs.client.read.shortcircuit.streams.cache.size': '4096', 'dfs.permissions.superusergroup': 'hdfs', 'dfs.journalnode.http-address': '0.0.0.0:8480', 'dfs.domain.socket.path': '/var/lib/hadoop-hdfs/dn_socket', 'dfs.namenode.avoid.read.stale.datanode': 'true', 'dfs.hosts.exclude': '/etc/hadoop/conf/dfs.exclude', 'dfs.datanode.data.dir.perm': '750', 'dfs.namenode.write.stale.datanode.ratio': '1.0f', 'dfs.replication.max': '50', 'dfs.namenode.name.dir': '/hadoop/hdfs/namenode'}
+    expected_post_path = '//clusters/test1/configurations'
+    expected_post_request = {'Clusters': {'desired_configs': {'tag': 'version1', 'type': 'hdfs-site', 'properties':expected_properties}}}   
+    expected_get_path = '//clusters/test1/configurations?type=hdfs-site&tag=version1'
+    expected_get_request = None
+        
+    cluster = self.create_cluster(http_client_mock)
+    existant_global_config = cluster.get_hdfs_site_config()
+    existant_global_config.properties['dfs.namenode.avoid.write.stale.datanode'] = 'abc'
+    cluster.update_hdfs_site_config(existant_global_config)
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_any_call('POST', expected_post_path, headers=None, payload=expected_post_request)
+    http_client_mock.invoke.assert_any_call('GET', expected_get_path, headers=None, payload=expected_get_request)\
+    
+  def test_update_mapred_site_config(self):
+    """
+    Update mapred-site config
+    """
+    http_client_mock = MagicMock()
+    
+    expected_properties = {'mapreduce.jobhistory.address': 'abc', 'mapreduce.reduce.input.buffer.percent': '0.0', 'mapred.jobtracker.maxtasks.per.job': '-1', 'mapreduce.framework.name': 'yarn', 'mapreduce.map.speculative': 'false', 'mapreduce.tasktracker.healthchecker.script.path': 'file:////mapred/jobstatus', 'mapreduce.reduce.shuffle.merge.percent': '0.66', 'mapred.userlog.retain.hours': '24', 'yarn.app.mapreduce.am.resource.mb': '1024', 'mapreduce.reduce.shuffle.parallelcopies': '30', 'mapreduce.map.java.opts': '-Xmx320m', 'mapreduce.task.io.sort.factor': '100', 'mapreduce.application.classpath': '$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*', 'yarn.app.mapreduce.am.command-opts': '-Xmx756m', 'mapreduce.job.reduce.slowstart.completedmaps': '0.05', 'mapreduce.output.fileoutputformat.compress.type': 'BLOCK', 'mapreduce.reduce.speculative': 'false', 'mapreduce.reduce.java.opts': '-Xmx756m', 'mapreduce.am.max-attempts': '2', 'yarn.app.
 mapreduce.am.admin-command-opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN', 'mapreduce.jobtracker.system.dir': '/mapred/system', 'mapreduce.map.sort.spill.percent': '0.1', 'mapreduce.task.timeout': '600000', 'mapreduce.map.memory.mb': '1536', 'mapreduce.reduce.log.level': 'INFO', 'mapreduce.jobhistory.intermediate-done-dir': '/mr-history/tmp', 'mapreduce.reduce.memory.mb': '2048', 'mapreduce.tasktracker.map.tasks.maximum': '4', 'yarn.app.mapreduce.am.log.level': 'INFO', 'mapreduce.map.log.level': 'INFO', 'mapreduce.shuffle.port': '13562', 'mapred.jobtracker.taskScheduler': 'org.apache.hadoop.mapred.CapacityTaskScheduler', 'mapreduce.admin.user.env': 'LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/`$JAVA_HOME/bin/java -d32 -version &amp;&gt; /dev/null;if [ $? -eq 0 ]; then echo Linux-i386-32; else echo Linux-amd64-64;fi`', 'mapreduce.jobhistory.webapp.address': 'dev05.hortonworks.com:19888', 'mapred.hosts.exclude': '/etc/hadoop/conf/map
 red.exclude', 'mapreduce.reduce.shuffle.input.buffer.percent': '0.7', 'yarn.app.mapreduce.am.staging-dir': '/user', 'mapred.hosts': '/etc/hadoop/conf/mapred.include', 'mapreduce.jobhistory.done-dir': '/mr-history/done', 'mapreduce.admin.reduce.child.java.opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN', 'mapreduce.task.io.sort.mb': '200', 'mapred.task.tracker.task-controller': 'org.apache.hadoop.mapred.DefaultTaskController', 'mapreduce.admin.map.child.java.opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN'}
+    expected_post_path = '//clusters/test1/configurations'
+    expected_post_request = {'Clusters': {'desired_configs': {'tag': 'version1', 'type': 'mapred-site', 'properties':expected_properties}}}   
+    expected_get_path = '//clusters/test1/configurations?type=mapred-site&tag=version1'
+    expected_get_request = None
+        
+    cluster = self.create_cluster(http_client_mock)
+    existant_global_config = cluster.get_mapred_site_config()
+    existant_global_config.properties['mapreduce.jobhistory.address'] = 'abc'
+    cluster.update_mapred_site_config(existant_global_config)
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_any_call('POST', expected_post_path, headers=None, payload=expected_post_request)
+    http_client_mock.invoke.assert_any_call('GET', expected_get_path, headers=None, payload=expected_get_request)
+    
+  def test_create_services(self):
+    """
+    Create services
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/services'
+    expected_request = [{'ServiceInfo': {'service_name': 'HDFS'}}, {'ServiceInfo': {'service_name': 'YARN'}}, {'ServiceInfo': {'service_name': 'MAPREDUCEv2'}}, {'ServiceInfo': {'service_name': 'TEZ'}}]
+    
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.create_services(['HDFS','YARN','MAPREDUCEv2','TEZ'])
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
+    
+  def test_create_service_components(self):
+    """
+    Create service components
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/services/?ServiceInfo/service_name=HDFS'
+    expected_request = {'components': [{'ServiceComponentInfo': {'component_name': u'NODEMANAGER'}}, {'ServiceComponentInfo': {'component_name': u'RESOURCEMANAGER'}}, {'ServiceComponentInfo': {'component_name': u'YARN_CLIENT'}}]}
+    
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.create_service_components("2.0.5", "HDFS")
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
+    
+  def test_create_service_component(self):
+    """
+    Create service component
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/services/HDFS/components/NAMENODE'
+    
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.create_service_component("2.0.5", "HDFS","NAMENODE")
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=None)
+    
+  def test_create_hosts(self):
+    """
+    Create cluster hosts
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/hosts'
+    expected_request = [{'Hosts': {'ip': '1.2.3.4', 'host_name': 'hostname01', 'rack_info': '/default-rack'}}, {'Hosts': {'ip': '2.3.1.22', 'host_name': 'hostname02', 'rack_info': 'rack'}}]
+        
+    cluster = self.create_cluster(http_client_mock)
+    host_list = [HostModel(None, 'hostname01','1.2.3.4'), HostModel(None, 'hostname02','2.3.1.22','rack')]
+    resp = cluster.create_hosts(host_list)
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
+    
+  def test_create_host(self):
+    """
+    Create cluster host
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/hosts'
+    expected_request = [{'Hosts': {'ip': '1.2.3.4', 'host_name': 'hostname01', 'rack_info': '/default-rack'}}]
+            
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.create_host('hostname01','1.2.3.4')
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
+    
+    
+  def test_delete_host(self):
+    """
+    Delete cluster host
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/hosts/hostname01'
+            
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.delete_host('hostname01')
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('DELETE', expected_path, headers=None, payload=None)
+    
+  def test_start_all_services(self):
+    """
+    Start all services
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/services?ServiceInfo/state=INSTALLED&params/run_smoke_test=true&params/reconfigure_client=false'
+    expected_request = {'RequestInfo': {'context': 'Start All Services'}, 'Body': {'ServiceInfo': {'state': 'STARTED'}}}
+            
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.start_all_services(True)
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
+    
+  def test_stop_all_services(self):
+    """
+    Stop all services
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/services?ServiceInfo'
+    expected_request = {'RequestInfo': {'context': 'Stop All Services'}, 'Body': {'ServiceInfo': {'state': 'INSTALLED'}}}
+            
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.stop_all_services()
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
+    
+    
+  def test_install_all_services(self):
+    """
+    Install all services
+    """   
+    http_client_mock = MagicMock()
+    
+    expected_path = '//clusters/test1/services?ServiceInfo/state=INSTALLED'
+    expected_request = {'RequestInfo': {'context': 'Install Services'}, 'Body': {'ServiceInfo': {'state': 'INSTALLED'}}}
+            
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.install_all_services()
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
+    
+  def test_add_config(self):
+    """
+    Set desired configurations
+    """   
+    http_client_mock = MagicMock()
+    
+    propr_dict = {"dfs_name_dir":"/data/1/hadoop/hdfs/namenode,/data/2/hadoop/hdfs/namenode,/data/3/hadoop/hdfs/namenode,/data/4/hadoop/hdfs/namenode,/data/5/hadoop/hdfs/namenode,/data/6/hadoop/hdfs/namenode,/data/7/hadoop/hdfs/namenode,/data/8/hadoop/hdfs/namenode", "namenode_heapsize":"1024m", "namenode_opt_newsize":"200m", "fs_checkpoint_dir":"/data/1/hadoop/hdfs/namesecondary", "dfs_data_dir":"/data/1/hadoop/hdfs/data,/data/2/hadoop/hdfs/data,/data/3/hadoop/hdfs/data,/data/4/hadoop/hdfs/data,/data/5/hadoop/hdfs/data,/data/6/hadoop/hdfs/data,/data/7/hadoop/hdfs/data,/data/8/hadoop/hdfs/data,/data/9/hadoop/hdfs/data,/data/10/hadoop/hdfs/data", "dtnode_heapsize":"1024m", "dfs_datanode_failed_volume_tolerated":"0", "dfs_webhdfs_enabled":"true", "hadoop_heapsize":"1024", "datanode_du_reserved":"0", "fs_checkpoint_period":"21600", "fs_checkpoint_size":"67108864", "hdfs_log_dir_prefix":"/var/log/hadoop", "hadoop_pid_dir_prefix":"/var/run/hadoop", "namenode_opt_maxnewsize":"200m", "dfs_
 exclude":"dfs.exclude", "dfs_include":"dfs.include", "dfs_replication":"3", "dfs_block_local_path_access_user":"hbase", "dfs_datanode_data_dir_perm":"750", "security_enabled":"false", "namenode_formatted_mark_dir":"/var/run/hadoop/hdfs/namenode/formatted/", "hcat_conf_dir":"", "jtnode_opt_newsize":"200m", "jtnode_opt_maxnewsize":"200m", "jtnode_heapsize":"1024m", "mapred_local_dir":"/data/1/hadoop/mapred,/data/2/hadoop/mapred,/data/3/hadoop/mapred,/data/4/hadoop/mapred,/data/5/hadoop/mapred,/data/6/hadoop/mapred,/data/7/hadoop/mapred,/data/8/hadoop/mapred,/data/9/hadoop/mapred,/data/10/hadoop/mapred", "mapred_map_tasks_max":"4", "mapred_red_tasks_max":"2", "mapred_child_java_opts_sz":"768", "scheduler_name":"org.apache.hadoop.mapred.CapacityTaskScheduler", "mapred_cluster_map_mem_mb":"1536", "mapred_cluster_red_mem_mb":"2048", "mapred_cluster_max_map_mem_mb":"6144", "mapred_cluster_max_red_mem_mb":"4096", "mapred_job_map_mem_mb":"1536", "mapred_job_red_mem_mb":"2048", "io_sort_mb":"
 200", "io_sort_spill_percent":"0.9", "mapreduce_userlog_retainhours":"24", "maxtasks_per_job":"-1", "lzo_enabled":"true", "snappy_enabled":"true", "rca_enabled":"true", "mapred_system_dir":"/mapred/system", "mapred_hosts_exclude":"mapred.exclude", "mapred_hosts_include":"mapred.include", "mapred_jobstatus_dir":"file:////mapred/jobstatus", "nagios_web_login":"nagiosadmin", "nagios_web_password":"admin", "nagios_contact":"admin@admin.com", "nagios_group":"nagios", "hbase_conf_dir":"/etc/hbase", "proxyuser_group":"users", "dfs_datanode_address":"50010", "dfs_datanode_http_address":"50075", "gpl_artifacts_download_url":"", "apache_artifacts_download_url":"", "ganglia_runtime_dir":"/var/run/ganglia/hdp", "java64_home":"/usr/jdk/jdk1.6.0_31", "run_dir":"/var/run/hadoop", "hadoop_conf_dir":"/etc/hadoop", "hdfs_user":"hdfs", "mapred_user":"mapred", "hbase_user":"hbase", "hive_user":"hive", "hcat_user":"hcat", "webhcat_user":"hcat", "oozie_user":"oozie", "zk_user":"zookeeper", "gmetad_user":
 "nobody", "gmond_user":"nobody", "nagios_user":"nagios", "smokeuser":"ambari-qa", "user_group":"hadoop", "rrdcached_base_dir":"/var/lib/ganglia/rrds"} 
+    expected_path = '//clusters/test1'
+    expected_request = {'Clusters': {'desired_configs': {'tag':'version1', 'type':'global', 'properties':propr_dict}}}
+                
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.add_config("global","version1",propr_dict)
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
+    
+  def test_create_config(self):
+    """
+    Add a configuration
+    """   
+    http_client_mock = MagicMock()
+    
+    propr_dict = {"dfs_name_dir":"/data/1/hadoop/hdfs/namenode,/data/2/hadoop/hdfs/namenode,/data/3/hadoop/hdfs/namenode,/data/4/hadoop/hdfs/namenode,/data/5/hadoop/hdfs/namenode,/data/6/hadoop/hdfs/namenode,/data/7/hadoop/hdfs/namenode,/data/8/hadoop/hdfs/namenode", "namenode_heapsize":"1024m", "namenode_opt_newsize":"200m", "fs_checkpoint_dir":"/data/1/hadoop/hdfs/namesecondary", "dfs_data_dir":"/data/1/hadoop/hdfs/data,/data/2/hadoop/hdfs/data,/data/3/hadoop/hdfs/data,/data/4/hadoop/hdfs/data,/data/5/hadoop/hdfs/data,/data/6/hadoop/hdfs/data,/data/7/hadoop/hdfs/data,/data/8/hadoop/hdfs/data,/data/9/hadoop/hdfs/data,/data/10/hadoop/hdfs/data", "dtnode_heapsize":"1024m", "dfs_datanode_failed_volume_tolerated":"0", "dfs_webhdfs_enabled":"true", "hadoop_heapsize":"1024", "datanode_du_reserved":"0", "fs_checkpoint_period":"21600", "fs_checkpoint_size":"67108864", "hdfs_log_dir_prefix":"/var/log/hadoop", "hadoop_pid_dir_prefix":"/var/run/hadoop", "namenode_opt_maxnewsize":"200m", "dfs_
 exclude":"dfs.exclude", "dfs_include":"dfs.include", "dfs_replication":"3", "dfs_block_local_path_access_user":"hbase", "dfs_datanode_data_dir_perm":"750", "security_enabled":"false", "namenode_formatted_mark_dir":"/var/run/hadoop/hdfs/namenode/formatted/", "hcat_conf_dir":"", "jtnode_opt_newsize":"200m", "jtnode_opt_maxnewsize":"200m", "jtnode_heapsize":"1024m", "mapred_local_dir":"/data/1/hadoop/mapred,/data/2/hadoop/mapred,/data/3/hadoop/mapred,/data/4/hadoop/mapred,/data/5/hadoop/mapred,/data/6/hadoop/mapred,/data/7/hadoop/mapred,/data/8/hadoop/mapred,/data/9/hadoop/mapred,/data/10/hadoop/mapred", "mapred_map_tasks_max":"4", "mapred_red_tasks_max":"2", "mapred_child_java_opts_sz":"768", "scheduler_name":"org.apache.hadoop.mapred.CapacityTaskScheduler", "mapred_cluster_map_mem_mb":"1536", "mapred_cluster_red_mem_mb":"2048", "mapred_cluster_max_map_mem_mb":"6144", "mapred_cluster_max_red_mem_mb":"4096", "mapred_job_map_mem_mb":"1536", "mapred_job_red_mem_mb":"2048", "io_sort_mb":"
 200", "io_sort_spill_percent":"0.9", "mapreduce_userlog_retainhours":"24", "maxtasks_per_job":"-1", "lzo_enabled":"true", "snappy_enabled":"true", "rca_enabled":"true", "mapred_system_dir":"/mapred/system", "mapred_hosts_exclude":"mapred.exclude", "mapred_hosts_include":"mapred.include", "mapred_jobstatus_dir":"file:////mapred/jobstatus", "nagios_web_login":"nagiosadmin", "nagios_web_password":"admin", "nagios_contact":"admin@admin.com", "nagios_group":"nagios", "hbase_conf_dir":"/etc/hbase", "proxyuser_group":"users", "dfs_datanode_address":"50010", "dfs_datanode_http_address":"50075", "gpl_artifacts_download_url":"", "apache_artifacts_download_url":"", "ganglia_runtime_dir":"/var/run/ganglia/hdp", "java64_home":"/usr/jdk/jdk1.6.0_31", "run_dir":"/var/run/hadoop", "hadoop_conf_dir":"/etc/hadoop", "hdfs_user":"hdfs", "mapred_user":"mapred", "hbase_user":"hbase", "hive_user":"hive", "hcat_user":"hcat", "webhcat_user":"hcat", "oozie_user":"oozie", "zk_user":"zookeeper", "gmetad_user":
 "nobody", "gmond_user":"nobody", "nagios_user":"nagios", "smokeuser":"ambari-qa", "user_group":"hadoop", "rrdcached_base_dir":"/var/lib/ganglia/rrds"} 
+    expected_path = '//clusters/test1'
+    expected_request = {'tag':'version1', 'type':'global', 'properties':propr_dict}
+                
+    cluster = self.create_cluster(http_client_mock)
+    resp = cluster.create_config("global","version1",propr_dict)
+    
+    self.assertEqual(cluster.cluster_name, "test1")
+    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
+    
+    

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/json/clustermodel_get_all_hosts.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/clustermodel_get_all_hosts.json b/ambari-client/src/test/python/json/clustermodel_get_all_hosts.json
new file mode 100644
index 0000000..614f8d4
--- /dev/null
+++ b/ambari-client/src/test/python/json/clustermodel_get_all_hosts.json
@@ -0,0 +1,375 @@
+{
+  "href" : "http://localhost:8080/api/v1/clusters/test1/hosts?fields=*",
+  "items" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/dev05.hortonworks.com",
+      "Hosts" : {
+        "cluster_name" : "test1",
+        "cpu_count" : 4,
+        "disk_info" : [
+          {
+            "available" : "43515968",
+            "used" : "7566036",
+            "percent" : "15%",
+            "size" : "51606140",
+            "type" : "ext4",
+            "mountpoint" : "/"
+          },
+          {
+            "available" : "5517988",
+            "used" : "260",
+            "percent" : "1%",
+            "size" : "5518248",
+            "type" : "tmpfs",
+            "mountpoint" : "/dev/shm"
+          },
+          {
+            "available" : "432210",
+            "used" : "38034",
+            "percent" : "9%",
+            "size" : "495844",
+            "type" : "ext4",
+            "mountpoint" : "/boot"
+          },
+          {
+            "available" : "44459872",
+            "used" : "184220",
+            "percent" : "1%",
+            "size" : "47033288",
+            "type" : "ext4",
+            "mountpoint" : "/home"
+          },
+          {
+            "available" : "57923596",
+            "used" : "918733808",
+            "percent" : "95%",
+            "size" : "976657404",
+            "type" : "vboxsf",
+            "mountpoint" : "/media/sf_share"
+          }
+        ],
+        "host_health_report" : "",
+        "host_name" : "dev05.hortonworks.com",
+        "host_state" : "HEALTHY",
+        "host_status" : "HEALTHY",
+        "ip" : "10.0.2.15",
+        "last_agent_env" : {
+          "stackFoldersAndFiles" : [
+            {
+              "name" : "/etc/hadoop",
+              "type" : "directory"
+            },
+            {
+              "name" : "/etc/hbase",
+              "type" : "directory"
+            },
+            {
+              "name" : "/etc/hcatalog",
+              "type" : "directory"
+            },
+            {
+              "name" : "/etc/hive",
+              "type" : "directory"
+            },
+            {
+              "name" : "/etc/ganglia",
+              "type" : "directory"
+            },
+            {
+              "name" : "/etc/nagios",
+              "type" : "directory"
+            },
+            {
+              "name" : "/etc/oozie",
+              "type" : "directory"
+            },
+            {
+              "name" : "/etc/zookeeper",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/run/hive",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/run/hadoop-hdfs",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/run/hadoop-yarn",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/run/hadoop-mapreduce",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/log/hbase",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/log/hive",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/log/oozie",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/log/zookeeper",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/log/hadoop-hdfs",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/log/hadoop-yarn",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/log/hadoop-mapreduce",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/hadoop",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/hbase",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/hive",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/nagios",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/oozie",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/zookeeper",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/hadoop-hdfs",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/hadoop-yarn",
+              "type" : "directory"
+            },
+            {
+              "name" : "/usr/lib/hadoop-mapreduce",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/lib/ganglia",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/lib/oozie",
+              "type" : "directory"
+            },
+            {
+              "name" : "/var/lib/zookeeper",
+              "type" : "directory"
+            }
+          ],
+          "rpms" : [ ],
+          "alternatives" : [
+            {
+              "name" : "hcatalog-conf",
+              "target" : "/etc/hcatalog/conf.dist"
+            },
+            {
+              "name" : "zookeeper-conf",
+              "target" : "/etc/zookeeper/conf.dist"
+            },
+            {
+              "name" : "hadoop-conf",
+              "target" : "/etc/hadoop/conf.empty"
+            },
+            {
+              "name" : "hbase-conf",
+              "target" : "/etc/hbase/conf.dist"
+            },
+            {
+              "name" : "hive-conf",
+              "target" : "/etc/hive/conf.dist"
+            },
+            {
+              "name" : "oozie-conf",
+              "target" : "/etc/oozie/conf.dist"
+            }
+          ],
+          "existingUsers" : [
+            {
+              "userName" : "rrdcached",
+              "userHomeDir" : "/var/rrdtool/rrdcached",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "zookeeper",
+              "userHomeDir" : "/var/run/zookeeper",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "hdfs",
+              "userHomeDir" : "/var/lib/hadoop-hdfs",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "hbase",
+              "userHomeDir" : "/var/run/hbase",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "yarn",
+              "userHomeDir" : "/var/lib/hadoop-yarn",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "mapred",
+              "userHomeDir" : "/var/lib/hadoop-mapreduce",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "hive",
+              "userHomeDir" : "/var/lib/hive",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "hcat",
+              "userHomeDir" : "/usr/lib/hcatalog",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "oozie",
+              "userHomeDir" : "/var/run/oozie",
+              "userStatus" : "Available"
+            },
+            {
+              "userName" : "nagios",
+              "userHomeDir" : "/var/log/nagios",
+              "userStatus" : "Available"
+            }
+          ],
+          "existingRepos" : [
+            "HDP-2.0.5"
+          ],
+          "installedPackages" : [ ],
+          "hostHealth" : {
+            "activeJavaProcs" : [
+              {
+                "user" : "root",
+                "pid" : 2716,
+                "command" : "/usr/bin/java -Dosgi.requiredJavaVersion=1.6 -XX:MaxPermSize=256m -Xms40m -Xmx512m -jar /usr/lib/eclipse//plugins/org.eclipse.equinox.launcher_1.3.0.v20130327-1440.jar -os linux -ws gtk -arch x86_64 -showsplash /usr/lib/eclipse//plugins/org.eclipse.platform_4.3.0.v20130605-2000/splash.bmp -launcher /usr/lib/eclipse/eclipse -name Eclipse --launcher.library /usr/lib/eclipse//plugins/org.eclipse.equinox.launcher.gtk.linux.x86_64_1.1.200.v20130521-0416/eclipse_1506.so -startup /usr/lib/eclipse//plugins/org.eclipse.equinox.launcher_1.3.0.v20130327-1440.jar --launcher.appendVmargs -exitdata 8000d -product org.eclipse.epp.package.jee.product -vm /usr/bin/java -vmargs -Dosgi.requiredJavaVersion=1.6 -XX:MaxPermSize=256m -Xms40m -Xmx512m -jar /usr/lib/eclipse//plugins/org.eclipse.equinox.launcher_1.3.0.v20130327-1440.jar",
+                "hadoop" : false
+              }
+            ],
+            "agentTimeStampAtReporting" : 1379683509805,
+            "serverTimeStampAtReporting" : 1379683509851,
+            "liveServices" : [
+              {
+                "name" : "ntpd",
+                "desc" : "",
+                "status" : "Healthy"
+              }
+            ],
+            "diskStatus" : [
+              {
+                "available" : "43516404",
+                "used" : "7565600",
+                "percent" : "15%",
+                "size" : "51606140",
+                "type" : "ext4",
+                "mountpoint" : "/"
+              }
+            ]
+          },
+          "umask" : 18
+        },
+        "last_heartbeat_time" : 1379683560126,
+        "last_registration_time" : 1379683395402,
+        "os_arch" : "x86_64",
+        "os_type" : "centos6",
+        "ph_cpu_count" : 1,
+        "public_host_name" : "dev05.hortonworks.com",
+        "rack_info" : "/default-rack",
+        "total_mem" : 11041505,
+        "desired_configs" : { }
+      },
+      "host_components" : [ ]
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/dev06.hortonworks.com",
+      "Hosts" : {
+        "cluster_name" : "test1",
+        "cpu_count" : 4,
+        "disk_info" : [
+          {
+            "available" : "45333752",
+            "used" : "5748252",
+            "percent" : "12%",
+            "size" : "51606140",
+            "type" : "ext4",
+            "mountpoint" : "/"
+          },
+          {
+            "available" : "5517976",
+            "used" : "272",
+            "percent" : "1%",
+            "size" : "5518248",
+            "type" : "tmpfs",
+            "mountpoint" : "/dev/shm"
+          },
+          {
+            "available" : "432210",
+            "used" : "38034",
+            "percent" : "9%",
+            "size" : "495844",
+            "type" : "ext4",
+            "mountpoint" : "/boot"
+          },
+          {
+            "available" : "44459840",
+            "used" : "184252",
+            "percent" : "1%",
+            "size" : "47033288",
+            "type" : "ext4",
+            "mountpoint" : "/home"
+          },
+          {
+            "available" : "136400692",
+            "used" : "840256712",
+            "percent" : "87%",
+            "size" : "976657404",
+            "type" : "vboxsf",
+            "mountpoint" : "/media/sf_share"
+          }
+        ],
+        "host_health_report" : "",
+        "host_name" : "dev06.hortonworks.com",
+        "host_state" : "HEARTBEAT_LOST",
+        "host_status" : "UNKNOWN",
+        "ip" : "10.0.2.15",
+        "last_agent_env" : null,
+        "last_heartbeat_time" : 0,
+        "last_registration_time" : 1378228232506,
+        "os_arch" : "x86_64",
+        "os_type" : "centos6",
+        "ph_cpu_count" : 1,
+        "public_host_name" : "dev06.hortonworks.com",
+        "rack_info" : "/default-rack",
+        "total_mem" : 11041505,
+        "desired_configs" : { }
+      },
+      "host_components" : [ ]
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/json/clustermodel_get_all_services.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/clustermodel_get_all_services.json b/ambari-client/src/test/python/json/clustermodel_get_all_services.json
new file mode 100644
index 0000000..53bb2da
--- /dev/null
+++ b/ambari-client/src/test/python/json/clustermodel_get_all_services.json
@@ -0,0 +1,120 @@
+{
+  "href" : "http://localhost:8080/api/v1/clusters/test1/services?fields=*",
+  "items" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/services/GANGLIA",
+      "ServiceInfo" : {
+        "cluster_name" : "test1",
+        "state" : "STARTED",
+        "service_name" : "GANGLIA",
+        "desired_configs" : {
+          "global" : "version1"
+        }
+      },
+      "components" : [
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/GANGLIA/components/GANGLIA_MONITOR",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "GANGLIA_MONITOR",
+            "service_name" : "GANGLIA"
+          }
+        },
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/GANGLIA/components/GANGLIA_SERVER",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "GANGLIA_SERVER",
+            "service_name" : "GANGLIA"
+          }
+        }
+      ]
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/services/MAPREDUCE",
+      "ServiceInfo" : {
+        "cluster_name" : "test1",
+        "state" : "STARTED",
+        "service_name" : "MAPREDUCE",
+        "desired_configs" : {
+          "mapred-site" : "version1",
+          "global" : "version1",
+          "core-site" : "version1"
+        }
+      },
+      "components" : [
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/MAPREDUCE/components/TASKTRACKER",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "TASKTRACKER",
+            "service_name" : "MAPREDUCE"
+          }
+        },
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/MAPREDUCE/components/MAPREDUCE_CLIENT",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "MAPREDUCE_CLIENT",
+            "service_name" : "MAPREDUCE"
+          }
+        },
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/MAPREDUCE/components/JOBTRACKER",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "JOBTRACKER",
+            "service_name" : "MAPREDUCE"
+          }
+        }
+      ]
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/services/HDFS",
+      "ServiceInfo" : {
+        "cluster_name" : "test1",
+        "state" : "STARTED",
+        "service_name" : "HDFS",
+        "desired_configs" : {
+          "global" : "version1",
+          "hdfs-site" : "version1",
+          "core-site" : "version1"
+        }
+      },
+      "components" : [
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/HDFS/components/SECONDARY_NAMENODE",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "SECONDARY_NAMENODE",
+            "service_name" : "HDFS"
+          }
+        },
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/HDFS/components/HDFS_CLIENT",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "HDFS_CLIENT",
+            "service_name" : "HDFS"
+          }
+        },
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/HDFS/components/NAMENODE",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "NAMENODE",
+            "service_name" : "HDFS"
+          }
+        },
+        {
+          "href" : "http://localhost:8080/api/v1/clusters/test1/services/HDFS/components/DATANODE",
+          "ServiceComponentInfo" : {
+            "cluster_name" : "test1",
+            "component_name" : "DATANODE",
+            "service_name" : "HDFS"
+          }
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/json/clustermodel_get_cluster.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/clustermodel_get_cluster.json b/ambari-client/src/test/python/json/clustermodel_get_cluster.json
new file mode 100644
index 0000000..4c72dea
--- /dev/null
+++ b/ambari-client/src/test/python/json/clustermodel_get_cluster.json
@@ -0,0 +1,103 @@
+{
+  "href" : "http://localhost:8080/api/v1/clusters/test1",
+  "Clusters" : {
+    "cluster_name" : "test1",
+    "cluster_id" : 1,
+    "version" : "HDP-1.2.1"
+  },
+  "requests" : [ ],
+  "services" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/services/GANGLIA",
+      "ServiceInfo" : {
+        "cluster_name" : "test1",
+        "service_name" : "GANGLIA"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/services/MAPREDUCE",
+      "ServiceInfo" : {
+        "cluster_name" : "test1",
+        "service_name" : "MAPREDUCE"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/services/HDFS",
+      "ServiceInfo" : {
+        "cluster_name" : "test1",
+        "service_name" : "HDFS"
+      }
+    }
+  ],
+  "hosts" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/r01mgt",
+      "Hosts" : {
+        "cluster_name" : "test1",
+        "host_name" : "r01mgt"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/r01hn01",
+      "Hosts" : {
+        "cluster_name" : "test1",
+        "host_name" : "r01hn01"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/r01wn03",
+      "Hosts" : {
+        "cluster_name" : "test1",
+        "host_name" : "r01wn03"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/r01wn02",
+      "Hosts" : {
+        "cluster_name" : "test1",
+        "host_name" : "r01wn02"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/r01wn01",
+      "Hosts" : {
+        "cluster_name" : "test1",
+        "host_name" : "r01wn01"
+      }
+    }
+  ],
+  "configurations" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=mapred-site&tag=version1",
+      "tag" : "version1",
+      "type" : "mapred-site",
+      "Config" : {
+        "cluster_name" : "test1"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=global&tag=version1",
+      "tag" : "version1",
+      "type" : "global",
+      "Config" : {
+        "cluster_name" : "test1"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=hdfs-site&tag=version1",
+      "tag" : "version1",
+      "type" : "hdfs-site",
+      "Config" : {
+        "cluster_name" : "test1"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=core-site&tag=version1",
+      "tag" : "version1",
+      "type" : "core-site",
+      "Config" : {
+        "cluster_name" : "test1"
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/json/clustermodel_get_core_site_config.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/clustermodel_get_core_site_config.json b/ambari-client/src/test/python/json/clustermodel_get_core_site_config.json
new file mode 100644
index 0000000..309ba13
--- /dev/null
+++ b/ambari-client/src/test/python/json/clustermodel_get_core_site_config.json
@@ -0,0 +1,32 @@
+{
+  "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=core-site&tag=version1",
+  "items" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=core-site&tag=version1",
+      "tag" : "version1",
+      "type" : "core-site",
+      "Config" : {
+        "cluster_name" : "test1"
+      },
+      "properties" : {
+        "dfs.namenode.checkpoint.dir" : "/hadoop/hdfs/namesecondary",
+        "dfs.namenode.checkpoint.edits.dir" : "${dfs.namenode.checkpoint.dir}",
+        "dfs.namenode.checkpoint.period" : "21600",
+        "fs.checkpoint.edits.dir" : "/hadoop/hdfs/namesecondary",
+        "fs.checkpoint.size" : "0.5",
+        "fs.defaultFS" : "hdfs://dev05.hortonworks.com:8020",
+        "fs.trash.interval" : "360",
+        "hadoop.security.auth_to_local" : "\n        RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n        RULE:[2:$1@$0](jhs@.*)s/.*/mapred/\n        RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n        RULE:[2:$1@$0](hm@.*)s/.*/hbase/\n        RULE:[2:$1@$0](rs@.*)s/.*/hbase/\n        DEFAULT\n    ",
+        "hadoop.security.authentication" : "simple",
+        "hadoop.security.authorization" : "false",
+        "io.compression.codecs" : "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec",
+        "io.file.buffer.size" : "131072",
+        "io.serializations" : "org.apache.hadoop.io.serializer.WritableSerialization",
+        "ipc.client.connect.max.retries" : "50",
+        "ipc.client.connection.maxidletime" : "30000",
+        "ipc.client.idlethreshold" : "8000",
+        "mapreduce.jobtracker.webinterface.trusted" : "false"
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/json/clustermodel_get_global_config.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/clustermodel_get_global_config.json b/ambari-client/src/test/python/json/clustermodel_get_global_config.json
new file mode 100644
index 0000000..2b7888d
--- /dev/null
+++ b/ambari-client/src/test/python/json/clustermodel_get_global_config.json
@@ -0,0 +1,64 @@
+{
+  "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=global&tag=version1",
+  "items" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=global&tag=version1",
+      "tag" : "version1",
+      "type" : "global",
+      "Config" : {
+        "cluster_name" : "test1"
+      },
+      "properties" : {
+        "apache_artifacts_download_url" : "",
+        "datanode_du_reserved" : "1",
+        "dfs_block_local_path_access_user" : "hbase",
+        "dfs_datanode_address" : "50010",
+        "dfs_datanode_data_dir" : "/hadoop/hdfs/data",
+        "dfs_datanode_data_dir_perm" : "750",
+        "dfs_datanode_failed_volume_tolerated" : "0",
+        "dfs_datanode_http_address" : "50075",
+        "dfs_exclude" : "dfs.exclude",
+        "dfs_namenode_checkpoint_dir" : "/hadoop/hdfs/namesecondary",
+        "dfs_namenode_checkpoint_period" : "21600",
+        "dfs_namenode_name_dir" : "/hadoop/hdfs/namenode",
+        "dfs_replication" : "3",
+        "dfs_webhdfs_enabled" : "true",
+        "dtnode_heapsize" : "1024m",
+        "fs_checkpoint_size" : "0.5",
+        "ganglia_runtime_dir" : "/var/run/ganglia/hdp",
+        "gmetad_user" : "nobody",
+        "gmond_user" : "nobody",
+        "gpl_artifacts_download_url" : "",
+        "hadoop_conf_dir" : "/etc/hadoop/conf",
+        "hadoop_heapsize" : "1024",
+        "hadoop_pid_dir_prefix" : "/var/run/hadoop",
+        "hbase_conf_dir" : "/etc/hbase",
+        "hbase_user" : "hbase",
+        "hcat_conf_dir" : "",
+        "hcat_user" : "hcat",
+        "hdfs_enable_shortcircuit_read" : "true",
+        "hdfs_log_dir_prefix" : "/var/log/hadoop",
+        "hdfs_user" : "hdfs",
+        "hive_user" : "hive",
+        "java64_home" : "/usr/jdk/jdk1.6.0_31",
+        "mapred_user" : "mapred",
+        "nagios_group" : "nagios",
+        "nagios_user" : "nagios",
+        "namenode_formatted_mark_dir" : "/var/run/hadoop/hdfs/namenode/formatted/",
+        "namenode_heapsize" : "1024m",
+        "namenode_opt_maxnewsize" : "640m",
+        "namenode_opt_newsize" : "200m",
+        "oozie_user" : "oozie",
+        "proxyuser_group" : "users",
+        "rrdcached_base_dir" : "/var/lib/ganglia/rrds",
+        "run_dir" : "/var/run/hadoop",
+        "security_enabled" : "false",
+        "smokeuser" : "ambari-qa",
+        "user_group" : "hadoop",
+        "webhcat_user" : "hcat",
+        "yarn_user" : "yarn",
+        "zk_user" : "zookeeper"
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/json/clustermodel_get_hdfs_site_config.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/clustermodel_get_hdfs_site_config.json b/ambari-client/src/test/python/json/clustermodel_get_hdfs_site_config.json
new file mode 100644
index 0000000..9108aa7
--- /dev/null
+++ b/ambari-client/src/test/python/json/clustermodel_get_hdfs_site_config.json
@@ -0,0 +1,53 @@
+{
+  "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=hdfs-site&tag=version1",
+  "items" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=hdfs-site&tag=version1",
+      "tag" : "version1",
+      "type" : "hdfs-site",
+      "Config" : {
+        "cluster_name" : "test1"
+      },
+      "properties" : {
+        "dfs.block.access.token.enable" : "true",
+        "dfs.block.local-path-access.user" : "hbase",
+        "dfs.blockreport.initialDelay" : "120",
+        "dfs.blocksize" : "134217728",
+        "dfs.client.read.shortcircuit" : "true",
+        "dfs.client.read.shortcircuit.streams.cache.size" : "4096",
+        "dfs.cluster.administrators" : " hdfs",
+        "dfs.datanode.address" : "0.0.0.0:50010",
+        "dfs.datanode.balance.bandwidthPerSec" : "6250000",
+        "dfs.datanode.data.dir" : "/hadoop/hdfs/data",
+        "dfs.datanode.data.dir.perm" : "750",
+        "dfs.datanode.du.reserved" : "1",
+        "dfs.datanode.failed.volumes.tolerated" : "0",
+        "dfs.datanode.http.address" : "0.0.0.0:50075",
+        "dfs.datanode.ipc.address" : "0.0.0.0:8010",
+        "dfs.datanode.max.transfer.threads" : "1024",
+        "dfs.domain.socket.path" : "/var/lib/hadoop-hdfs/dn_socket",
+        "dfs.heartbeat.interval" : "3",
+        "dfs.hosts.exclude" : "/etc/hadoop/conf/dfs.exclude",
+        "dfs.https.namenode.https-address" : "dev05.hortonworks.com:50470",
+        "dfs.journalnode.edits.dir" : "/grid/0/hdfs/journal",
+        "dfs.journalnode.http-address" : "0.0.0.0:8480",
+        "dfs.namenode.accesstime.precision" : "0",
+        "dfs.namenode.avoid.read.stale.datanode" : "true",
+        "dfs.namenode.avoid.write.stale.datanode" : "true",
+        "dfs.namenode.handler.count" : "100",
+        "dfs.namenode.http-address" : "dev05.hortonworks.com:50070",
+        "dfs.namenode.name.dir" : "/hadoop/hdfs/namenode",
+        "dfs.namenode.safemode.threshold-pct" : "1.0f",
+        "dfs.namenode.secondary.http-address" : "dev05.hortonworks.com:50090",
+        "dfs.namenode.stale.datanode.interval" : "30000",
+        "dfs.namenode.write.stale.datanode.ratio" : "1.0f",
+        "dfs.permissions.enabled" : "true",
+        "dfs.permissions.superusergroup" : "hdfs",
+        "dfs.replication" : "3",
+        "dfs.replication.max" : "50",
+        "dfs.webhdfs.enabled" : "true",
+        "fs.permissions.umask-mode" : "022"
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/json/clustermodel_get_host.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/clustermodel_get_host.json b/ambari-client/src/test/python/json/clustermodel_get_host.json
new file mode 100644
index 0000000..b1b09d6
--- /dev/null
+++ b/ambari-client/src/test/python/json/clustermodel_get_host.json
@@ -0,0 +1,250 @@
+{
+  "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/myhost",
+  "Hosts" : {
+    "cluster_name" : "test1",
+    "cpu_count" : 24,
+    "disk_info" : [
+      {
+        "available" : "25938900",
+        "used" : "5743652",
+        "percent" : "19%",
+        "size" : "33378088",
+        "type" : "ext4",
+        "mountpoint" : "/"
+      },
+      {
+        "available" : "49525536",
+        "used" : "0",
+        "percent" : "0%",
+        "size" : "49525536",
+        "type" : "tmpfs",
+        "mountpoint" : "/dev/shm"
+      },
+      {
+        "available" : "433221",
+        "used" : "37023",
+        "percent" : "8%",
+        "size" : "495844",
+        "type" : "ext4",
+        "mountpoint" : "/boot"
+      },
+      {
+        "available" : "3020752",
+        "used" : "71284",
+        "percent" : "3%",
+        "size" : "3257512",
+        "type" : "ext4",
+        "mountpoint" : "/home"
+      },
+      {
+        "available" : "547125892",
+        "used" : "404652",
+        "percent" : "1%",
+        "size" : "576831992",
+        "type" : "ext4",
+        "mountpoint" : "/data/1"
+      },
+      {
+        "available" : "547305068",
+        "used" : "225476",
+        "percent" : "1%",
+        "size" : "576831992",
+        "type" : "ext4",
+        "mountpoint" : "/data/2"
+      },
+      {
+        "available" : "547325924",
+        "used" : "204620",
+        "percent" : "1%",
+        "size" : "576831992",
+        "type" : "ext4",
+        "mountpoint" : "/data/3"
+      },
+      {
+        "available" : "547320928",
+        "used" : "209616",
+        "percent" : "1%",
+        "size" : "576831992",
+        "type" : "ext4",
+        "mountpoint" : "/data/4"
+      },
+      {
+        "available" : "547315544",
+        "used" : "215000",
+        "percent" : "1%",
+        "size" : "576831992",
+        "type" : "ext4",
+        "mountpoint" : "/data/5"
+      },
+      {
+        "available" : "547327008",
+        "used" : "203536",
+        "percent" : "1%",
+        "size" : "576831992",
+        "type" : "ext4",
+        "mountpoint" : "/data/6"
+      },
+      {
+        "available" : "547310644",
+        "used" : "219900",
+        "percent" : "1%",
+        "size" : "576831992",
+        "type" : "ext4",
+        "mountpoint" : "/data/7"
+      },
+      {
+        "available" : "547320544",
+        "used" : "210000",
+        "percent" : "1%",
+        "size" : "576831992",
+        "type" : "ext4",
+        "mountpoint" : "/data/8"
+      }
+    ],
+    "host_health_report" : "",
+    "host_name" : "myhost",
+    "host_state" : "HEALTHY",
+    "host_status" : "HEALTHY",
+    "ip" : "10.104.44.95",
+    "last_agent_env" : {
+      "stackFoldersAndFiles" : [ ],
+      "rpms" : [
+        {
+          "name" : "nagios",
+          "installed" : true,
+          "version" : "nagios-3.5.0-99.x86_64"
+        },
+        {
+          "name" : "ganglia",
+          "installed" : false
+        },
+        {
+          "name" : "hadoop",
+          "installed" : true,
+          "version" : "hadoop-1.2.0.1.3.0.0-107.el6.x86_64"
+        },
+        {
+          "name" : "hadoop-lzo",
+          "installed" : true,
+          "version" : "hadoop-lzo-0.5.0-1.x86_64"
+        },
+        {
+          "name" : "hbase",
+          "installed" : false
+        },
+        {
+          "name" : "oozie",
+          "installed" : true,
+          "version" : "oozie-3.3.2.1.3.0.0-107.el6.noarch"
+        },
+        {
+          "name" : "sqoop",
+          "installed" : false
+        },
+        {
+          "name" : "pig",
+          "installed" : false
+        },
+        {
+          "name" : "zookeeper",
+          "installed" : true,
+          "version" : "zookeeper-3.4.5.1.3.0.0-107.el6.noarch"
+        },
+        {
+          "name" : "hive",
+          "installed" : true,
+          "version" : "hive-0.11.0.1.3.0.0-107.el6.noarch"
+        },
+        {
+          "name" : "libconfuse",
+          "installed" : true,
+          "version" : "libconfuse-2.6-3.el6.x86_64"
+        },
+        {
+          "name" : "ambari-log4j",
+          "installed" : true,
+          "version" : "ambari-log4j-1.2.3.6-1.noarch"
+        }
+      ],
+      "alternatives" : [ ],
+      "existingUsers" : [ ],
+      "existingRepos" : [
+        "unable_to_determine"
+      ],
+      "installedPackages" : [ ],
+      "hostHealth" : {
+        "activeJavaProcs" : [ ],
+        "agentTimeStampAtReporting" : 1377776815389,
+        "serverTimeStampAtReporting" : 1377776801984,
+        "liveServices" : [
+          {
+            "name" : "ntpd",
+            "desc" : "ntpd is stopped\n",
+            "status" : "Unhealthy"
+          }
+        ],
+        "diskStatus" : [
+          {
+            "available" : "25032456",
+            "used" : "6650096",
+            "percent" : "21%",
+            "size" : "33378088",
+            "type" : "ext4",
+            "mountpoint" : "/"
+          }
+        ]
+      }
+    },
+    "last_heartbeat_time" : 1377776827188,
+    "last_registration_time" : 1377605551901,
+    "os_arch" : "x86_64",
+    "os_type" : "redhat6",
+    "ph_cpu_count" : 2,
+    "public_host_name" : "myhost",
+    "rack_info" : "/default-rack",
+    "total_mem" : 99048488,
+    "desired_configs" : { }
+  },
+  "host_components" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/myhost/host_components/GANGLIA_MONITOR",
+      "HostRoles" : {
+        "cluster_name" : "test1",
+        "component_name" : "GANGLIA_MONITOR",
+        "host_name" : "myhost"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/myhost/host_components/GANGLIA_SERVER",
+      "HostRoles" : {
+        "cluster_name" : "test1",
+        "component_name" : "GANGLIA_SERVER",
+        "host_name" : "myhost"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/myhost/host_components/MAPREDUCE_CLIENT",
+      "HostRoles" : {
+        "cluster_name" : "test1",
+        "component_name" : "MAPREDUCE_CLIENT",
+        "host_name" : "myhost"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/myhost/host_components/NAGIOS_SERVER",
+      "HostRoles" : {
+        "cluster_name" : "test1",
+        "component_name" : "NAGIOS_SERVER",
+        "host_name" : "myhost"
+      }
+    },
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/hosts/myhost/host_components/NAMENODE",
+      "HostRoles" : {
+        "cluster_name" : "test1",
+        "component_name" : "NAMENODE",
+        "host_name" : "myhost"
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a0386357/ambari-client/src/test/python/json/clustermodel_get_mapred_site_config.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/clustermodel_get_mapred_site_config.json b/ambari-client/src/test/python/json/clustermodel_get_mapred_site_config.json
new file mode 100644
index 0000000..1080be1
--- /dev/null
+++ b/ambari-client/src/test/python/json/clustermodel_get_mapred_site_config.json
@@ -0,0 +1,58 @@
+{
+  "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=mapred-site&tag=version1",
+  "items" : [
+    {
+      "href" : "http://localhost:8080/api/v1/clusters/test1/configurations?type=mapred-site&tag=version1",
+      "tag" : "version1",
+      "type" : "mapred-site",
+      "Config" : {
+        "cluster_name" : "test1"
+      },
+      "properties" : {
+        "mapred.hosts" : "/etc/hadoop/conf/mapred.include",
+        "mapred.hosts.exclude" : "/etc/hadoop/conf/mapred.exclude",
+        "mapred.jobtracker.maxtasks.per.job" : "-1",
+        "mapred.jobtracker.taskScheduler" : "org.apache.hadoop.mapred.CapacityTaskScheduler",
+        "mapred.task.tracker.task-controller" : "org.apache.hadoop.mapred.DefaultTaskController",
+        "mapred.userlog.retain.hours" : "24",
+        "mapreduce.admin.map.child.java.opts" : "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN",
+        "mapreduce.admin.reduce.child.java.opts" : "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN",
+        "mapreduce.admin.user.env" : "LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/`$JAVA_HOME/bin/java -d32 -version &> /dev/null;if [ $? -eq 0 ]; then echo Linux-i386-32; else echo Linux-amd64-64;fi`",
+        "mapreduce.am.max-attempts" : "2",
+        "mapreduce.application.classpath" : "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*",
+        "mapreduce.framework.name" : "yarn",
+        "mapreduce.job.reduce.slowstart.completedmaps" : "0.05",
+        "mapreduce.jobhistory.address" : "dev05.hortonworks.com:10020",
+        "mapreduce.jobhistory.done-dir" : "/mr-history/done",
+        "mapreduce.jobhistory.intermediate-done-dir" : "/mr-history/tmp",
+        "mapreduce.jobhistory.webapp.address" : "dev05.hortonworks.com:19888",
+        "mapreduce.jobtracker.system.dir" : "/mapred/system",
+        "mapreduce.map.java.opts" : "-Xmx320m",
+        "mapreduce.map.log.level" : "INFO",
+        "mapreduce.map.memory.mb" : "1536",
+        "mapreduce.map.sort.spill.percent" : "0.1",
+        "mapreduce.map.speculative" : "false",
+        "mapreduce.output.fileoutputformat.compress.type" : "BLOCK",
+        "mapreduce.reduce.input.buffer.percent" : "0.0",
+        "mapreduce.reduce.java.opts" : "-Xmx756m",
+        "mapreduce.reduce.log.level" : "INFO",
+        "mapreduce.reduce.memory.mb" : "2048",
+        "mapreduce.reduce.shuffle.input.buffer.percent" : "0.7",
+        "mapreduce.reduce.shuffle.merge.percent" : "0.66",
+        "mapreduce.reduce.shuffle.parallelcopies" : "30",
+        "mapreduce.reduce.speculative" : "false",
+        "mapreduce.shuffle.port" : "13562",
+        "mapreduce.task.io.sort.factor" : "100",
+        "mapreduce.task.io.sort.mb" : "200",
+        "mapreduce.task.timeout" : "600000",
+        "mapreduce.tasktracker.healthchecker.script.path" : "file:////mapred/jobstatus",
+        "mapreduce.tasktracker.map.tasks.maximum" : "4",
+        "yarn.app.mapreduce.am.admin-command-opts" : "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN",
+        "yarn.app.mapreduce.am.command-opts" : "-Xmx756m",
+        "yarn.app.mapreduce.am.log.level" : "INFO",
+        "yarn.app.mapreduce.am.resource.mb" : "1024",
+        "yarn.app.mapreduce.am.staging-dir" : "/user"
+      }
+    }
+  ]
+}
\ No newline at end of file


Mime
View raw message