incubator-ambari-commits mailing list archives

From dmitriu...@apache.org
Subject [3/7] AMBARI-3810. Unit tests for File resource and all its attributes (Eugene Chekanskiy via dlysnichenko)
Date Tue, 19 Nov 2013 18:17:17 GMT
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py b/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
new file mode 100644
index 0000000..95712f0
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
@@ -0,0 +1,525 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+import logging
+import unittest
+import subprocess
+from mock.mock import patch
+from mock.mock import MagicMock
+from mock.mock import create_autospec
+from ambari_agent.HostCheckReportFileHandler import HostCheckReportFileHandler
+from ambari_agent.PackagesAnalyzer import PackagesAnalyzer
+from ambari_agent.HostInfo import HostInfo
+from ambari_agent.Hardware import Hardware
+from ambari_agent.AmbariConfig import AmbariConfig
+
+class TestHostInfo(TestCase):
+
+  logger = logging.getLogger()
+
+  @patch.object(PackagesAnalyzer, 'hasZypper')
+  @patch.object(PackagesAnalyzer, 'subprocessWithTimeout')
+  def test_analyze_zypper_out(self, spwt_mock, hasZy_mock):
+    packageAnalyzer = PackagesAnalyzer()
+    stringToRead = """Refreshing service 'susecloud'.
+           Loading repository data...
+           Reading installed packages...
+
+           S | Name                              | Type    | Version                | Arch   | Repository
+           --+-----------------------------------+---------+------------------------+--------+----------------------
+           i | ConsoleKit                        | package | 0.2.10-64.65.1         | x86_64 | SLES11-SP1-Updates
+           i | gweb                              | package | 2.2.0-99               | noarch | Hortonworks Data Platform Utils Version - HDP-UTILS-1.1.0.15
+           i | hadoop                            | package | 1.2.0.1.3.0.0-107      | x86_64 | HDP
+           i | hadoop-libhdfs                    | package | 1.2.0.1.3.0.0-107      | x86_64 | HDP
+           i | ambari-server                     | package | 1.2.4.9-1              | noarch | Ambari 1.x
+           i | hdp_mon_ganglia_addons            | package | 1.2.4.9-1              | noarch | Ambari 1.x
+           i | Minimal                           | pattern | 11-38.13.9             | x86_64 | SLES11-SP1"""
+    result = {}
+    result['out'] = stringToRead
+    result['err'] = ""
+    result['retCode'] = 0
+
+    spwt_mock.return_value = result
+    hasZy_mock.return_value = True
+    installedPackages = []
+    packageAnalyzer.allInstalledPackages(installedPackages)
+    self.assertEqual(7, len(installedPackages))
+    self.assertEqual(installedPackages[1][0], "gweb")
+    self.assertEqual(installedPackages[3][2], "HDP")
+    self.assertEqual(installedPackages[6][1], "11-38.13.9")
+
+  def test_getReposToRemove(self):
+    l1 = ["Hortonworks Data Platform Utils Version - HDP-UTILS-1.1.0.15", "Ambari 1.x", "HDP"]
+    l2 = ["Ambari", "HDP-UTIL"]
+    hostInfo = HostInfo()
+    l3 = hostInfo.getReposToRemove(l1, l2)
+    self.assertEqual(1, len(l3))
+    self.assertEqual(l3[0], "HDP")
+
+    l1 = ["AMBARI.dev-1.x", "HDP-1.3.0"]
+    l3 = hostInfo.getReposToRemove(l1, l2)
+    self.assertEqual(1, len(l3))
+    self.assertEqual(l3[0], "HDP-1.3.0")
+
+  def test_perform_package_analysis(self):
+    packageAnalyzer = PackagesAnalyzer()
+    installedPackages = [
+      ["hadoop-a", "2.3", "HDP"], ["zk", "3.1", "HDP"], ["webhcat", "3.1", "HDP"],
+      ["hadoop-b", "2.3", "HDP-epel"], ["epel", "3.1", "HDP-epel"], ["epel-2", "3.1", "HDP-epel"],
+      ["hadoop-c", "2.3", "Ambari"], ["ambari-s", "3.1", "Ambari"],
+      ["nagios", "2.3", "NAGIOS"], ["rrd", "3.1", "RRD"],
+      ["keeper-1", "2.3", "NAGIOS"], ["keeper-2", "3.1", "base"],["def-def.x86", "2.2", "DEF.3"],
+      ["def.1", "1.2", "NewDEF"]
+    ]
+    availablePackages = [
+      ["hadoop-d", "2.3", "HDP"], ["zk-2", "3.1", "HDP"], ["pig", "3.1", "HDP"],
+      ["epel-3", "2.3", "HDP-epel"], ["hadoop-e", "3.1", "HDP-epel"],
+      ["ambari-a", "3.1", "Ambari"],
+      ["keeper-3", "3.1", "base"]
+    ]
+
+    packagesToLook = ["webhcat", "hadoop", "*-def"]
+    reposToIgnore = ["ambari"]
+    additionalPackages = ["nagios", "rrd"]
+
+    repos = []
+    packageAnalyzer.getInstalledRepos(packagesToLook, installedPackages + availablePackages, reposToIgnore, repos)
+    self.assertEqual(3, len(repos))
+    expected = ["HDP", "HDP-epel", "DEF.3"]
+    for repo in expected:
+      self.assertTrue(repo in repos)
+
+    packagesInstalled = packageAnalyzer.getInstalledPkgsByRepo(repos, ["epel"], installedPackages)
+    self.assertEqual(5, len(packagesInstalled))
+    expected = ["hadoop-a", "zk", "webhcat", "hadoop-b", "def-def.x86"]
+    for repo in expected:
+      self.assertTrue(repo in packagesInstalled)
+
+    additionalPkgsInstalled = packageAnalyzer.getInstalledPkgsByNames(
+        additionalPackages, installedPackages)
+    self.assertEqual(2, len(additionalPkgsInstalled))
+    expected = ["nagios", "rrd"]
+    for additionalPkg in expected:
+      self.assertTrue(additionalPkg in additionalPkgsInstalled)
+
+    allPackages = list(set(packagesInstalled + additionalPkgsInstalled))
+    self.assertEqual(7, len(allPackages))
+    expected = ["hadoop-a", "zk", "webhcat", "hadoop-b", "nagios", "rrd", "def-def.x86"]
+    for package in expected:
+      self.assertTrue(package in allPackages)
+
+  @patch.object(PackagesAnalyzer, 'hasZypper')
+  @patch.object(PackagesAnalyzer, 'subprocessWithTimeout')
+  def test_analyze_yum_output(self, subprocessWithTimeout_mock, hasZy_mock):
+    packageAnalyzer = PackagesAnalyzer()
+    stringToRead = """Loaded plugins: amazon-id, product-id, rhui-lb, security, subscription-manager
+                      Updating certificate-based repositories.
+                      Installed Packages
+                      AMBARI.dev.noarch             1.x-1.el6             installed
+                      PyXML.x86_64                  0.8.4-19.el6          @koji-override-0
+                      Red_Hat_Enterprise_Linux-Release_Notes-6-en-US.noarch
+                              3-7.el6               @koji-override-0
+                      hcatalog.noarch               0.11.0.1.3.0.0-107.el6
+                                                    @HDP-1.3.0
+                      hesiod.x86_64                 3.1.0-19.el6          @koji-override-0/$releasever
+                      hive.noarch                   0.11.0.1.3.0.0-107.el6
+                                                    @HDP-1.3.0
+                      oracle-server-db.x86          1.3.17-2
+                                                    @Oracle-11g
+                      ambari-log4j.noarch           1.2.5.9-1             @AMBARI.dev-1.x
+                      libconfuse.x86_64             2.7-4.el6             @HDP-epel"""
+    result = {}
+    result['out'] = stringToRead
+    result['err'] = ""
+    result['retCode'] = 0
+
+    subprocessWithTimeout_mock.return_value = result
+    hasZy_mock.return_value = False
+    installedPackages = []
+    packageAnalyzer.allInstalledPackages(installedPackages)
+    self.assertEqual(9, len(installedPackages))
+    for package in installedPackages:
+      self.assertTrue(package[0] in ["AMBARI.dev.noarch", "PyXML.x86_64", "oracle-server-db.x86",
+                                 "Red_Hat_Enterprise_Linux-Release_Notes-6-en-US.noarch",
+                                 "hcatalog.noarch", "hesiod.x86_64", "hive.noarch", "ambari-log4j.noarch", "libconfuse.x86_64"])
+      self.assertTrue(package[1] in ["1.x-1.el6", "0.8.4-19.el6", "3-7.el6", "3.1.0-19.el6",
+                                 "0.11.0.1.3.0.0-107.el6", "1.2.5.9-1", "1.3.17-2", "1.2.5.9-1", "2.7-4.el6"])
+      self.assertTrue(package[2] in ["installed", "koji-override-0", "HDP-1.3.0",
+                                 "koji-override-0/$releasever", "AMBARI.dev-1.x", "Oracle-11g", "HDP-epel"])
+
+    packages = packageAnalyzer.getInstalledPkgsByNames(["AMBARI", "Red_Hat_Enterprise", "hesiod", "hive"],
+                                                       installedPackages)
+    self.assertEqual(4, len(packages))
+    expected = ["AMBARI.dev.noarch", "Red_Hat_Enterprise_Linux-Release_Notes-6-en-US.noarch",
+                                "hesiod.x86_64", "hive.noarch"]
+    for package in expected:
+      self.assertTrue(package in packages)
+
+    detailedPackages = packageAnalyzer.getPackageDetails(installedPackages, packages)
+    self.assertEqual(4, len(detailedPackages))
+    for package in detailedPackages:
+      self.assertTrue(package['version'] in ["1.x-1.el6", "3-7.el6", "3.1.0-19.el6",
+                                            "0.11.0.1.3.0.0-107.el6"])
+      self.assertTrue(package['repoName'] in ["installed", "koji-override-0", "HDP-1.3.0",
+                                              "koji-override-0/$releasever"])
+      self.assertFalse(package['repoName'] in ["AMBARI.dev-1.x"])
+
+  @patch.object(PackagesAnalyzer, 'subprocessWithTimeout')
+  def test_analyze_yum_output_err(self, subprocessWithTimeout_mock):
+    packageAnalyzer = PackagesAnalyzer()
+
+    result = {}
+    result['out'] = ""
+    result['err'] = ""
+    result['retCode'] = 1
+
+    subprocessWithTimeout_mock.return_value = result
+    installedPackages = []
+    packageAnalyzer.allInstalledPackages(installedPackages)
+    self.assertEqual(installedPackages, [])
+
+
+  @patch('os.path.exists')
+  def test_checkFolders(self, path_mock):
+    path_mock.return_value = True
+    hostInfo = HostInfo()
+    results = []
+    existingUsers = [{'name':'a1', 'homeDir':'/home/a1'}, {'name':'b1', 'homeDir':'/home/b1'}]
+    hostInfo.checkFolders(["/etc/conf", "/var/lib", "/home/"], ["a1", "b1"], existingUsers, results)
+    self.assertEqual(4, len(results))
+    names = [i['name'] for i in results]
+    for item in ['/etc/conf/a1', '/var/lib/a1', '/etc/conf/b1', '/var/lib/b1']:
+      self.assertTrue(item in names)
+
+  @patch('os.path.exists')
+  @patch('__builtin__.open')
+  def test_checkUsers(self, builtins_open_mock, path_mock):
+    builtins_open_mock.return_value = [
+      "hdfs:x:493:502:Hadoop HDFS:/usr/lib/hadoop:/bin/bash",
+      "zookeeper:x:492:502:ZooKeeper:/var/run/zookeeper:/bin/bash"]
+    path_mock.side_effect = [True, False]
+
+    hostInfo = HostInfo()
+    results = []
+    hostInfo.checkUsers(["zookeeper", "hdfs"], results)
+    self.assertEqual(2, len(results))
+    newlist = sorted(results, key=lambda k: k['name'])
+    self.assertEqual(newlist[0]['name'], "hdfs")
+    self.assertEqual(newlist[1]['name'], "zookeeper")
+    self.assertEqual(newlist[0]['homeDir'], "/usr/lib/hadoop")
+    self.assertEqual(newlist[1]['homeDir'], "/var/run/zookeeper")
+    self.assertEqual(newlist[0]['status'], "Available")
+    self.assertEqual(newlist[1]['status'], "Invalid home directory")
+
+
+  @patch.object(HostInfo, 'get_os_type')
+  @patch('os.umask')
+  @patch.object(HostCheckReportFileHandler, 'writeHostCheckFile')
+  @patch.object(PackagesAnalyzer, 'allAvailablePackages')
+  @patch.object(PackagesAnalyzer, 'allInstalledPackages')
+  @patch.object(PackagesAnalyzer, 'getPackageDetails')
+  @patch.object(PackagesAnalyzer, 'getInstalledPkgsByNames')
+  @patch.object(PackagesAnalyzer, 'getInstalledPkgsByRepo')
+  @patch.object(PackagesAnalyzer, 'getInstalledRepos')
+  @patch.object(HostInfo, 'checkUsers')
+  @patch.object(HostInfo, 'checkLiveServices')
+  @patch.object(HostInfo, 'javaProcs')
+  @patch.object(HostInfo, 'checkFolders')
+  @patch.object(HostInfo, 'etcAlternativesConf')
+  @patch.object(HostInfo, 'hadoopVarRunCount')
+  @patch.object(HostInfo, 'hadoopVarLogCount')
+  def test_hostinfo_register_suse(self, hvlc_mock, hvrc_mock, eac_mock, cf_mock, jp_mock,
+                             cls_mock, cu_mock, gir_mock, gipbr_mock, gipbn_mock,
+                             gpd_mock, aip_mock, aap_mock, whcf_mock, os_umask_mock, get_os_type_mock):
+    hvlc_mock.return_value = 1
+    hvrc_mock.return_value = 1
+    gipbr_mock.return_value = ["pkg1"]
+    gipbn_mock.return_value = ["pkg2"]
+    gpd_mock.return_value = ["pkg1", "pkg2"]
+    get_os_type_mock.return_value = "suse"
+
+    hostInfo = HostInfo()
+    dict = {}
+    hostInfo.register(dict, False, False)
+    self.assertFalse(gir_mock.called)
+    self.assertFalse(gpd_mock.called)
+    self.assertFalse(aip_mock.called)
+    self.assertFalse(aap_mock.called)
+    self.assertTrue(os_umask_mock.called)
+    self.assertFalse(whcf_mock.called)
+
+    self.assertTrue(0 == len(dict['installedPackages']))
+    self.assertTrue('agentTimeStampAtReporting' in dict['hostHealth'])
+
+
+  @patch.object(HostInfo, 'get_os_type')
+  @patch('os.umask')
+  @patch.object(HostCheckReportFileHandler, 'writeHostCheckFile')
+  @patch.object(PackagesAnalyzer, 'allAvailablePackages')
+  @patch.object(PackagesAnalyzer, 'allInstalledPackages')
+  @patch.object(PackagesAnalyzer, 'getPackageDetails')
+  @patch.object(PackagesAnalyzer, 'getInstalledPkgsByNames')
+  @patch.object(PackagesAnalyzer, 'getInstalledPkgsByRepo')
+  @patch.object(PackagesAnalyzer, 'getInstalledRepos')
+  @patch.object(HostInfo, 'checkUsers')
+  @patch.object(HostInfo, 'checkLiveServices')
+  @patch.object(HostInfo, 'javaProcs')
+  @patch.object(HostInfo, 'checkFolders')
+  @patch.object(HostInfo, 'etcAlternativesConf')
+  @patch.object(HostInfo, 'hadoopVarRunCount')
+  @patch.object(HostInfo, 'hadoopVarLogCount')
+  @patch.object(HostInfo, 'checkIptables')
+  def test_hostinfo_register(self, cit_mock, hvlc_mock, hvrc_mock, eac_mock, cf_mock, jp_mock,
+                             cls_mock, cu_mock, gir_mock, gipbr_mock, gipbn_mock,
+                             gpd_mock, aip_mock, aap_mock, whcf_mock, os_umask_mock, get_os_type_mock):
+    cit_mock.return_value = True
+    hvlc_mock.return_value = 1
+    hvrc_mock.return_value = 1
+    gipbr_mock.return_value = ["pkg1"]
+    gipbn_mock.return_value = ["pkg2"]
+    gpd_mock.return_value = ["pkg1", "pkg2"]
+    get_os_type_mock.return_value = "redhat"
+
+    hostInfo = HostInfo()
+    dict = {}
+    hostInfo.register(dict, True, True)
+    self.verifyReturnedValues(dict)
+
+    hostInfo.register(dict, True, False)
+    self.verifyReturnedValues(dict)
+
+    hostInfo.register(dict, False, True)
+    self.verifyReturnedValues(dict)
+    self.assertTrue(os_umask_mock.call_count == 2)
+
+    hostInfo = HostInfo()
+    dict = {}
+    hostInfo.register(dict, False, False)
+    self.assertTrue(gir_mock.called)
+    self.assertTrue(gpd_mock.called)
+    self.assertTrue(aip_mock.called)
+    self.assertTrue(cit_mock.called)
+
+    for existingPkg in ["pkg1", "pkg2"]:
+      self.assertTrue(existingPkg in dict['installedPackages'])
+    args, kwargs = gpd_mock.call_args_list[0]
+    for existingPkg in ["pkg1", "pkg2"]:
+      self.assertTrue(existingPkg in args[1])
+
+  def verifyReturnedValues(self, dict):
+    hostInfo = HostInfo()
+    self.assertEqual(dict['alternatives'], [])
+    self.assertEqual(dict['stackFoldersAndFiles'], [])
+    self.assertEqual(dict['existingUsers'], [])
+    self.assertEqual(dict['existingRepos'][0], hostInfo.RESULT_UNAVAILABLE)
+    self.assertEqual(dict['installedPackages'], [])
+    self.assertTrue(dict['iptablesIsRunning'])
+
+  @patch("os.path.exists")
+  @patch("os.path.islink")
+  @patch("os.path.isdir")
+  @patch("os.path.isfile")
+  def test_dirType(self, os_path_isfile_mock, os_path_isdir_mock, os_path_islink_mock, os_path_exists_mock):
+    host = HostInfo()
+
+    os_path_exists_mock.return_value = False
+    result = host.dirType("/home")
+    self.assertEquals(result, 'not_exist')
+
+    os_path_exists_mock.return_value = True
+    os_path_islink_mock.return_value = True
+    result = host.dirType("/home")
+    self.assertEquals(result, 'sym_link')
+
+    os_path_exists_mock.return_value = True
+    os_path_islink_mock.return_value = False
+    os_path_isdir_mock.return_value = True
+    result = host.dirType("/home")
+    self.assertEquals(result, 'directory')
+
+    os_path_exists_mock.return_value = True
+    os_path_islink_mock.return_value = False
+    os_path_isdir_mock.return_value = False
+    os_path_isfile_mock.return_value = True
+    result = host.dirType("/home")
+    self.assertEquals(result, 'file')
+
+    os_path_exists_mock.return_value = True
+    os_path_islink_mock.return_value = False
+    os_path_isdir_mock.return_value = False
+    os_path_isfile_mock.return_value = False
+    result = host.dirType("/home")
+    self.assertEquals(result, 'unknown')
+
+
+  @patch("os.path.exists")
+  @patch("glob.glob")
+  def test_hadoopVarRunCount(self, glob_glob_mock, os_path_exists_mock):
+    hostInfo = HostInfo()
+
+    os_path_exists_mock.return_value = True
+    glob_glob_mock.return_value = ['pid1','pid2','pid3']
+    result = hostInfo.hadoopVarRunCount()
+    self.assertEquals(result, 3)
+
+    os_path_exists_mock.return_value = False
+    result = hostInfo.hadoopVarRunCount()
+    self.assertEquals(result, 0)
+
+
+  @patch("os.path.exists")
+  @patch("glob.glob")
+  def test_hadoopVarLogCount(self, glob_glob_mock, os_path_exists_mock):
+    hostInfo = HostInfo()
+
+    os_path_exists_mock.return_value = True
+    glob_glob_mock.return_value = ['log1','log2']
+    result = hostInfo.hadoopVarLogCount()
+    self.assertEquals(result, 2)
+
+    os_path_exists_mock.return_value = False
+    result = hostInfo.hadoopVarLogCount()
+    self.assertEquals(result, 0)
+
+
+  @patch("os.listdir", create=True, autospec=True)
+  @patch("__builtin__.open", create=True, autospec=True)
+  @patch("pwd.getpwuid", create=True, autospec=True)
+  def test_javaProcs(self, pwd_getpwuid_mock, builtin_open_mock, os_listdir_mock):
+    hostInfo = HostInfo()
+    openRead = MagicMock()
+    openRead.read.return_value = '/java/;/hadoop/'
+    builtin_open_mock.side_effect = [openRead, ['Uid: 22']]
+    pwuid = MagicMock()
+    pwd_getpwuid_mock.return_value = pwuid
+    pwuid.pw_name = 'user'
+    os_listdir_mock.return_value = ['1']
+    list = []
+    hostInfo.javaProcs(list)
+
+    self.assertEquals(list[0]['command'], '/java/;/hadoop/')
+    self.assertEquals(list[0]['pid'], 1)
+    self.assertTrue(list[0]['hadoop'])
+    self.assertEquals(list[0]['user'], 'user')
+
+
+  @patch("subprocess.Popen")
+  @patch.object(Hardware, 'extractMountInfo')
+  def test_osdiskAvailableSpace(self, extract_mount_info_mock, subproc_popen_mock):
+    hostInfo = HostInfo()
+    p = MagicMock()
+    p.communicate.return_value = ['some']
+    subproc_popen_mock.return_value = p
+    extract_mount_info_mock.return_value = {'info' : 'info'}
+    result = hostInfo.osdiskAvailableSpace('')
+
+    self.assertEqual(result['info'], 'info')
+
+    p.communicate.return_value = ''
+    result = hostInfo.osdiskAvailableSpace('')
+
+    self.assertEquals(result, {})
+
+
+  @patch.object(HostInfo, "get_os_type")
+  @patch("subprocess.Popen")
+  def test_checkLiveServices(self, subproc_popen, get_os_type_method):
+    hostInfo = HostInfo()
+    p = MagicMock()
+    p.returncode = 0
+    p.communicate.return_value = ('', 'err')
+    subproc_popen.return_value = p
+    result = []
+    get_os_type_method.return_value = 'redhat'
+    hostInfo.checkLiveServices(['service1'], result)
+
+    self.assertEquals(result[0]['status'], 'Healthy')
+    self.assertEquals(result[0]['name'], 'service1')
+    self.assertEquals(result[0]['desc'], '')
+    self.assertEquals(str(subproc_popen.call_args_list),
+                      "[call(['/sbin/service', 'service1', 'status'], stderr=-1, stdout=-1)]")
+
+    p.returncode = 1
+    p.communicate.return_value = ('out', 'err')
+    result = []
+    hostInfo.checkLiveServices(['service1'], result)
+
+    self.assertEquals(result[0]['status'], 'Unhealthy')
+    self.assertEquals(result[0]['name'], 'service1')
+    self.assertEquals(result[0]['desc'], 'out')
+
+    p.communicate.return_value = ('', 'err')
+    result = []
+    hostInfo.checkLiveServices(['service1'], result)
+
+    self.assertEquals(result[0]['status'], 'Unhealthy')
+    self.assertEquals(result[0]['name'], 'service1')
+    self.assertEquals(result[0]['desc'], 'err')
+
+    p.communicate.return_value = ('', 'err', '')
+    result = []
+    hostInfo.checkLiveServices(['service1'], result)
+
+    self.assertEquals(result[0]['status'], 'Unhealthy')
+    self.assertEquals(result[0]['name'], 'service1')
+    self.assertTrue(len(result[0]['desc']) > 0)
+
+
+  @patch("os.path.exists")
+  @patch("os.listdir", create=True, autospec=True)
+  @patch("os.path.islink")
+  @patch("os.path.realpath")
+  def test_etcAlternativesConf(self, os_path_realpath_mock, os_path_islink_mock, os_listdir_mock, os_path_exists_mock):
+    hostInfo = HostInfo()
+    os_path_exists_mock.return_value = False
+    result = hostInfo.etcAlternativesConf('',[])
+
+    self.assertEquals(result, [])
+
+    os_path_exists_mock.return_value = True
+    os_listdir_mock.return_value = ['config1']
+    os_path_islink_mock.return_value = True
+    os_path_realpath_mock.return_value = 'real_path_to_conf'
+    result = []
+    hostInfo.etcAlternativesConf('project',result)
+
+    self.assertEquals(result[0]['name'], 'config1')
+    self.assertEquals(result[0]['target'], 'real_path_to_conf')
+
+
+  @patch("subprocess.Popen")
+  def test_checkIptables(self, subproc_popen_mock):
+    hostInfo = HostInfo()
+    p = MagicMock()
+    p.returncode = 0
+    subproc_popen_mock.return_value = p
+    result = hostInfo.checkIptables()
+
+    self.assertTrue(result)
+
+    p.returncode = 1
+    result = hostInfo.checkIptables()
+
+    self.assertFalse(result)
+
+
+if __name__ == "__main__":
+  unittest.main()
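
For context on what test_analyze_zypper_out and test_analyze_yum_output pin down: allInstalledPackages is expected to turn the mocked command output into [name, version, repo] entries. The real parser lives in PackagesAnalyzer and is not part of this hunk; the snippet below is only a minimal sketch of the zypper case, assuming the table is pipe-separated and that rows flagged "i" are the installed packages.

def parse_zypper_installed(output):
  # Sketch, not the shipped PackagesAnalyzer code: collect
  # [name, version, repo] for every installed ("i") row.
  packages = []
  for line in output.splitlines():
    cols = [c.strip() for c in line.split('|')]
    if len(cols) == 6 and cols[0] == 'i':
      packages.append([cols[1], cols[3], cols[5]])
  return packages

Feeding the sample output from test_analyze_zypper_out through such a parser yields the seven entries the test asserts on.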

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestHostname.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostname.py b/ambari-agent/src/test/python/ambari_agent/TestHostname.py
new file mode 100644
index 0000000..a319b23
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostname.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+import ambari_agent.hostname as hostname
+import ambari_agent.AmbariConfig as AmbariConfig
+import socket
+import tempfile
+import shutil
+import os, pprint, json,stat
+
+class TestHostname(TestCase):
+
+  def test_hostname(self):
+    self.assertEquals(hostname.hostname(), socket.getfqdn(), 
+                      "hostname should equal the socket-based hostname")
+    pass
+
+  def test_hostname_override(self):
+    fd = tempfile.mkstemp(text=True)
+    tmpname = fd[1]
+    os.close(fd[0])
+    os.chmod(tmpname, os.stat(tmpname).st_mode | stat.S_IXUSR)
+
+    tmpfile = file(tmpname, "w+")
+    config = AmbariConfig.config
+    try:
+      tmpfile.write("#!/bin/sh\n\necho 'test.example.com'")
+      tmpfile.close()
+
+      config.set('agent', 'hostname_script', tmpname)
+
+      self.assertEquals(hostname.hostname(), 'test.example.com', "expected hostname 'test.example.com'")
+    finally:
+      os.remove(tmpname)
+      config.remove_option('agent', 'hostname_script')
+
+    pass
+
+  def test_public_hostname_override(self):
+    fd = tempfile.mkstemp(text=True)
+    tmpname = fd[1]
+    os.close(fd[0])
+    os.chmod(tmpname, os.stat(tmpname).st_mode | stat.S_IXUSR)
+   
+    tmpfile = file(tmpname, "w+")
+
+    config = AmbariConfig.config
+    try:
+      tmpfile.write("#!/bin/sh\n\necho 'test.example.com'")
+      tmpfile.close()
+
+      config.set('agent', 'public_hostname_script', tmpname)
+
+      self.assertEquals(hostname.public_hostname(), 'test.example.com', 
+                        "expected hostname 'test.example.com'")
+    finally:
+      os.remove(tmpname)
+      config.remove_option('agent', 'public_hostname_script')
+
+    pass
+
+
+
+
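
The override tests above rely on hostname.hostname() (and public_hostname()) preferring a configured, executable script and falling back to socket.getfqdn(). The real logic lives in ambari_agent/hostname.py and is not shown in this patch; a minimal sketch of that behaviour, assuming the script's stdout is taken verbatim as the hostname, would be:

import socket
import subprocess

def hostname_from_config(config, option='hostname_script'):
  # Sketch only: run the configured script if present, otherwise
  # fall back to the fully qualified domain name.
  try:
    script = config.get('agent', option)
    output = subprocess.Popen([script], stdout=subprocess.PIPE).communicate()[0]
    return output.strip()
  except Exception:
    return socket.getfqdn()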

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py b/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
new file mode 100644
index 0000000..49b2273
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from ambari_agent.LiveStatus import LiveStatus
+from ambari_agent.AmbariConfig import AmbariConfig
+import socket
+import os, sys, StringIO
+from ambari_agent import ActualConfigHandler
+from mock.mock import patch, MagicMock, call
+
+class TestLiveStatus(TestCase):
+
+  def setUp(self):
+    # disable stdout
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+
+  def tearDown(self):
+    # enable stdout
+    sys.stdout = sys.__stdout__
+
+  @patch.object(ActualConfigHandler.ActualConfigHandler, "read_actual_component")
+  def test_build(self, read_actual_component_mock):
+    for component in LiveStatus.COMPONENTS:
+      config = AmbariConfig().getConfig()
+      config.set('agent', 'prefix', "ambari_agent" + os.sep + "dummy_files")
+      livestatus = LiveStatus('', component['serviceName'], component['componentName'], {}, config)
+      livestatus.versionsHandler.versionsFilePath = "ambari_agent" + os.sep + "dummy_files" + os.sep + "dummy_current_stack"
+      result = livestatus.build()
+      print "LiveStatus of {0}: {1}".format(component['serviceName'], str(result))
+      self.assertEquals(len(result) > 0, True, 'Livestatus should not be empty')
+      if component['componentName'] == 'GANGLIA_SERVER':
+        self.assertEquals(result['stackVersion'],'{"stackName":"HDP","stackVersion":"1.2.2"}',
+                      'Livestatus should contain component stack version')
+
+    # Test build status for CLIENT component (in LiveStatus.CLIENT_COMPONENTS)
+    read_actual_component_mock.return_value = "some tags"
+    livestatus = LiveStatus('c1', 'HDFS', 'HDFS_CLIENT', { }, config)
+    result = livestatus.build()
+    self.assertTrue(len(result) > 0, 'Livestatus should not be empty')
+    self.assertTrue(result.has_key('configurationTags'))

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestMain.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestMain.py b/ambari-agent/src/test/python/ambari_agent/TestMain.py
new file mode 100644
index 0000000..b9ae2b9
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestMain.py
@@ -0,0 +1,276 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import StringIO
+import sys
+
+from ambari_agent import NetUtil, security
+from mock.mock import MagicMock, patch, ANY
+import unittest
+from ambari_agent import ProcessHelper, main
+from ambari_agent import ProcessHelper, main
+import logging
+import signal
+from ambari_agent.AmbariConfig import AmbariConfig
+import ConfigParser
+import os
+import tempfile
+from ambari_agent.PingPortListener import PingPortListener
+from ambari_agent.Controller import Controller
+from optparse import OptionParser
+from ambari_agent.DataCleaner import DataCleaner
+
+
+class TestMain(unittest.TestCase):
+
+  def setUp(self):
+    # disable stdout
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+
+  def tearDown(self):
+    # enable stdout
+    sys.stdout = sys.__stdout__
+
+
+  @patch("os._exit")
+  @patch("os.getpid")
+  @patch.object(ProcessHelper, "stopAgent")
+  def test_signal_handler(self, stopAgent_mock, os_getpid_mock, os_exit_mock):
+    # testing exit of children
+    main.agentPid = 4444
+    os_getpid_mock.return_value = 5555
+    main.signal_handler("signum", "frame")
+    self.assertTrue(os_exit_mock.called)
+
+    os_exit_mock.reset_mock()
+
+    # testing exit of main process
+    os_getpid_mock.return_value = main.agentPid
+    main.signal_handler("signum", "frame")
+    self.assertFalse(os_exit_mock.called)
+    self.assertTrue(stopAgent_mock.called)
+
+
+  @patch.object(main.logger, "addHandler")
+  @patch.object(main.logger, "setLevel")
+  @patch("logging.handlers.RotatingFileHandler")
+  def test_setup_logging(self, rfh_mock, setLevel_mock, addHandler_mock):
+    # Testing silent mode
+    main.setup_logging(False)
+    self.assertTrue(addHandler_mock.called)
+    setLevel_mock.assert_called_with(logging.INFO)
+
+    addHandler_mock.reset_mock()
+    setLevel_mock.reset_mock()
+
+    # Testing verbose mode
+    main.setup_logging(True)
+    self.assertTrue(addHandler_mock.called)
+    setLevel_mock.assert_called_with(logging.DEBUG)
+
+
+  @patch.object(main.logger, "setLevel")
+  @patch("logging.basicConfig")
+  def test_update_log_level(self, basicConfig_mock, setLevel_mock):
+    config = AmbariConfig().getConfig()
+
+    # Testing with default setup (config file does not contain loglevel entry)
+    # Log level should not be changed
+    config.set('agent', 'loglevel', None)
+    main.update_log_level(config)
+    self.assertFalse(setLevel_mock.called)
+
+    setLevel_mock.reset_mock()
+
+    # Testing debug mode
+    config.set('agent', 'loglevel', 'DEBUG')
+    main.update_log_level(config)
+    setLevel_mock.assert_called_with(logging.DEBUG)
+    setLevel_mock.reset_mock()
+
+    # Testing any other mode
+    config.set('agent', 'loglevel', 'INFO')
+    main.update_log_level(config)
+    setLevel_mock.assert_called_with(logging.INFO)
+
+    setLevel_mock.reset_mock()
+
+    config.set('agent', 'loglevel', 'WRONG')
+    main.update_log_level(config)
+    setLevel_mock.assert_called_with(logging.INFO)
+
+
+  @patch("signal.signal")
+  def test_bind_signal_handlers(self, signal_mock):
+    main.bind_signal_handlers()
+    # Check if on SIGINT/SIGTERM agent is configured to terminate
+    signal_mock.assert_any_call(signal.SIGINT, main.signal_handler)
+    signal_mock.assert_any_call(signal.SIGTERM, main.signal_handler)
+    # Check if on SIGUSR1 agent is configured to fall into debug
+    signal_mock.assert_any_call(signal.SIGUSR1, main.debug)
+
+
+  @patch("os.path.exists")
+  @patch("ConfigParser.RawConfigParser.read")
+  def test_resolve_ambari_config(self, read_mock, exists_mock):
+    # Trying case if conf file exists
+    exists_mock.return_value = True
+    main.resolve_ambari_config()
+    self.assertTrue(read_mock.called)
+
+    exists_mock.reset_mock()
+    read_mock.reset_mock()
+
+    # Trying case if conf file does not exist
+    exists_mock.return_value = False
+    main.resolve_ambari_config()
+    self.assertFalse(read_mock.called)
+
+
+  @patch("sys.exit")
+  @patch("os.path.isfile")
+  @patch("os.path.isdir")
+  @patch("hostname.hostname")
+  def test_perform_prestart_checks(self, hostname_mock, isdir_mock, isfile_mock, exit_mock):
+    main.config = AmbariConfig().getConfig()
+
+    # Check expected hostname test
+    hostname_mock.return_value = "test.hst"
+
+    main.perform_prestart_checks("another.hst")
+    self.assertTrue(exit_mock.called)
+
+    exit_mock.reset_mock()
+
+    # Trying case if there is another instance running
+    isfile_mock.return_value = True
+    isdir_mock.return_value = True
+    main.perform_prestart_checks(None)
+    self.assertTrue(exit_mock.called)
+
+    isfile_mock.reset_mock()
+    isdir_mock.reset_mock()
+    exit_mock.reset_mock()
+
+    # Trying case if agent prefix dir does not exist
+    isfile_mock.return_value = False
+    isdir_mock.return_value = False
+    main.perform_prestart_checks(None)
+    self.assertTrue(exit_mock.called)
+
+    isfile_mock.reset_mock()
+    isdir_mock.reset_mock()
+    exit_mock.reset_mock()
+
+    # Trying normal case
+    isfile_mock.return_value = False
+    isdir_mock.return_value = True
+    main.perform_prestart_checks(None)
+    self.assertFalse(exit_mock.called)
+
+
+  @patch("time.sleep")
+  @patch("os.kill")
+  @patch("os._exit")
+  @patch("os.path.exists")
+  def test_daemonize_and_stop(self, exists_mock, _exit_mock, kill_mock, sleep_mock):
+    oldpid = ProcessHelper.pidfile
+    pid = str(os.getpid())
+    _, tmpoutfile = tempfile.mkstemp()
+    ProcessHelper.pidfile = tmpoutfile
+
+    # Test daemonization
+    main.daemonize()
+    saved = open(ProcessHelper.pidfile, 'r').read()
+    self.assertEqual(pid, saved)
+
+    # Reuse pid file when testing agent stop
+    # Testing normal exit
+    exists_mock.return_value = False
+    main.stop_agent()
+    kill_mock.assert_called_with(int(pid), signal.SIGTERM)
+    _exit_mock.assert_called_with(0)
+
+    # Restore
+    kill_mock.reset_mock()
+    _exit_mock.reset_mock()
+
+    # Testing exit when failed to remove pid file
+    exists_mock.return_value = True
+    main.stop_agent()
+    kill_mock.assert_any_call(int(pid), signal.SIGTERM)
+    kill_mock.assert_any_call(int(pid), signal.SIGKILL)
+    _exit_mock.assert_called_with(1)
+
+    # Restore
+    ProcessHelper.pidfile = oldpid
+    os.remove(tmpoutfile)
+
+
+  @patch.object(main, "setup_logging")
+  @patch.object(main, "bind_signal_handlers")
+  @patch.object(main, "stop_agent")
+  @patch.object(main, "resolve_ambari_config")
+  @patch.object(main, "perform_prestart_checks")
+  @patch.object(main, "daemonize")
+  @patch.object(main, "update_log_level")
+  @patch.object(NetUtil.NetUtil, "try_to_connect")
+  @patch.object(Controller, "__init__")
+  @patch.object(Controller, "start")
+  @patch.object(Controller, "join")
+  @patch("optparse.OptionParser.parse_args")
+  @patch.object(DataCleaner,"start")
+  @patch.object(DataCleaner,"__init__")
+  @patch.object(PingPortListener,"start")
+  @patch.object(PingPortListener,"__init__")
+  def test_main(self, ping_port_init_mock, ping_port_start_mock, data_clean_init_mock,data_clean_start_mock,
+                parse_args_mock, join_mock, start_mock, Controller_init_mock, try_to_connect_mock,
+                update_log_level_mock, daemonize_mock, perform_prestart_checks_mock,
+                resolve_ambari_config_mock, stop_mock, bind_signal_handlers_mock, setup_logging_mock):
+    data_clean_init_mock.return_value = None
+    Controller_init_mock.return_value = None
+    ping_port_init_mock.return_value = None
+    options = MagicMock()
+    parse_args_mock.return_value = (options, MagicMock)
+
+    #testing call without command-line arguments
+    main.main()
+
+    self.assertTrue(setup_logging_mock.called)
+    self.assertTrue(bind_signal_handlers_mock.called)
+    self.assertTrue(stop_mock.called)
+    self.assertTrue(resolve_ambari_config_mock.called)
+    self.assertTrue(perform_prestart_checks_mock.called)
+    self.assertTrue(daemonize_mock.called)
+    self.assertTrue(update_log_level_mock.called)
+    try_to_connect_mock.assert_called_once_with(ANY, -1, ANY)
+    self.assertTrue(start_mock.called)
+    self.assertTrue(data_clean_init_mock.called)
+    self.assertTrue(data_clean_start_mock.called)
+    self.assertTrue(ping_port_init_mock.called)
+    self.assertTrue(ping_port_start_mock.called)
+
+    perform_prestart_checks_mock.reset_mock()
+
+    # Testing call with --expected-hostname parameter
+    options.expected_hostname = "test.hst"
+    main.main()
+    perform_prestart_checks_mock.assert_called_once_with(options.expected_hostname)
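
test_daemonize_and_stop above encodes the expected shutdown protocol: stop_agent() sends SIGTERM to the pid recorded in the pid file, exits with 0 if the file disappears (the agent cleaned up), and otherwise escalates to SIGKILL and exits with 1. The snippet below is a sketch of that flow under those assumptions, not the actual main.py implementation; the grace period is invented for illustration.

import os
import signal
import time

def stop_agent_sketch(pidfile, grace_seconds=5):
  # Sketch: ask the agent to stop, verify it removed its pid file,
  # and force-kill it if it did not.
  pid = int(open(pidfile, 'r').read())
  os.kill(pid, signal.SIGTERM)
  time.sleep(grace_seconds)
  if not os.path.exists(pidfile):
    os._exit(0)
  os.kill(pid, signal.SIGKILL)
  os._exit(1)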

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestManifestGenerator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestManifestGenerator.py b/ambari-agent/src/test/python/ambari_agent/TestManifestGenerator.py
new file mode 100644
index 0000000..f4ea6dc
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestManifestGenerator.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import os, sys, StringIO
+
+from unittest import TestCase
+from ambari_agent import manifestGenerator
+import ambari_agent.AmbariConfig
+import tempfile
+import json
+import shutil
+from ambari_agent.AmbariConfig import AmbariConfig
+from mock.mock import patch, MagicMock, call
+
+
+class TestManifestGenerator(TestCase):
+
+  def setUp(self):
+    # disable stdout
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+    self.dir = tempfile.mkdtemp()
+    self.config = AmbariConfig()
+    jsonCommand = file('../../main/python/ambari_agent/test.json').read()
+    self.parsedJson = json.loads(jsonCommand)
+
+
+  def tearDown(self):
+    shutil.rmtree(self.dir)
+
+    # enable stdout
+    sys.stdout = sys.__stdout__
+
+
+  def testWriteImports(self):
+    tmpFileName = tempfile.mkstemp(dir=self.dir, text=True)[1]
+    print tmpFileName
+    tmpFile = file(tmpFileName, 'r+')
+
+    manifestGenerator.writeImports(tmpFile, '../../main/puppet/modules', self.config.getImports())
+    tmpFile.seek(0)
+    print tmpFile.read()
+    tmpFile.close()
+
+
+    pass
+
+  @patch.object(manifestGenerator, 'writeImports')
+  @patch.object(manifestGenerator, 'writeNodes')
+  @patch.object(manifestGenerator, 'writeParams')
+  @patch.object(manifestGenerator, 'writeTasks')
+  def testGenerateManifest(self, writeTasksMock, writeParamsMock, writeNodesMock, writeImportsMock):
+    tmpFileName = tempfile.mkstemp(dir=self.dir, text=True)[1]
+    self.parsedJson['roleParams'] = 'role param'
+    manifestGenerator.generateManifest(self.parsedJson, tmpFileName, '../../main/puppet/modules', self.config.getConfig())
+
+    self.assertTrue(writeParamsMock.called)
+    self.assertTrue(writeNodesMock.called)
+    self.assertTrue(writeImportsMock.called)
+    self.assertTrue(writeTasksMock.called)
+
+    print file(tmpFileName).read()
+
+    def raiseTypeError():
+      raise TypeError()
+    writeNodesMock.side_effect = raiseTypeError
+    manifestGenerator.generateManifest(self.parsedJson, tmpFileName, '../../main/puppet/modules', self.config.getConfig())
+    pass
+
+  def testEscape(self):
+    shouldBe = '\\\'\\\\'
+    result = manifestGenerator.escape('\'\\')
+    self.assertEqual(result, shouldBe)
+
+
+  def test_writeNodes(self):
+    tmpFileName = tempfile.mkstemp(dir=self.dir, text=True)[1]
+    tmpFile = file(tmpFileName, 'r+')
+
+    clusterHostInfo = self.parsedJson['clusterHostInfo']
+    clusterHostInfo['zookeeper_hosts'] = ["h1.hortonworks.com", "h2.hortonworks.com"]
+    manifestGenerator.writeNodes(tmpFile, clusterHostInfo)
+    tmpFile.seek(0)
+    print tmpFile.read()
+    tmpFile.close()
+    os.remove(tmpFileName)
+
+  def test_writeNodes_failed(self):
+    tmpFileName = tempfile.mkstemp(dir=self.dir, text=True)[1]
+    tmpFile = file(tmpFileName, 'r+')
+
+    clusterHostInfo = self.parsedJson['clusterHostInfo']
+    clusterHostInfo.update({u'ZOOKEEPER':[None]})
+    clusterHostInfo['zookeeper_hosts'] = ["h1.hortonworks.com", "h2.hortonworks.com"]
+    self.assertRaises(TypeError, manifestGenerator.writeNodes, tmpFile, clusterHostInfo)
+    tmpFile.seek(0)
+    print tmpFile.read()
+    tmpFile.close()
+    os.remove(tmpFileName)
+
+  def test_writeHostAttributes(self):
+    tmpFileName = tempfile.mkstemp(dir=self.dir, text=True)[1]
+    tmpFile = file(tmpFileName, 'r+')
+
+    hostAttributes = {'HostAttr1' : '1', 'HostAttr2' : '2'}
+    manifestGenerator.writeHostAttributes(tmpFile, hostAttributes)
+    tmpFile.seek(0)
+    print tmpFile.read()
+    tmpFile.close()
+    os.remove(tmpFileName)
+
+
+  def test_writeTasks(self):
+    tmpFileName = tempfile.mkstemp(dir=self.dir, text=True)[1]
+    tmpFile = file(tmpFileName, 'r+')
+    roles = [{'role' : 'ZOOKEEPER_SERVER',
+              'cmd' : 'NONE',
+              'roleParams' : {'someRoleParams': '-x'}}]
+    clusterHostInfo = self.parsedJson['clusterHostInfo']
+    clusterHostInfo['zookeeper_hosts'] = ["h1.hortonworks.com", "h2.hortonworks.com"]
+    manifestGenerator.writeTasks(tmpFile, roles, self.config, clusterHostInfo, "h1.hortonworks.com")
+    tmpFile.seek(0)
+    print tmpFile.read()
+    tmpFile.close()
+    os.remove(tmpFileName)
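
testEscape above fixes the contract of manifestGenerator.escape: backslashes and single quotes must be backslash-escaped so a value can be embedded safely in a single-quoted Puppet string. A one-liner satisfying that contract (a sketch, not necessarily the shipped implementation) is:

def escape_sketch(value):
  # Escape backslashes first, then single quotes.
  return value.replace('\\', '\\\\').replace("'", "\\'")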

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestNetUtil.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestNetUtil.py b/ambari-agent/src/test/python/ambari_agent/TestNetUtil.py
new file mode 100644
index 0000000..e1fe02d
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestNetUtil.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from ambari_agent import NetUtil
+from mock.mock import MagicMock, patch
+import unittest
+
+class TestNetUtil(unittest.TestCase):
+
+  @patch("urlparse.urlparse")
+  @patch("httplib.HTTPSConnection")
+  def test_checkURL(self, httpsConMock, parseMock):
+
+    NetUtil.logger = MagicMock()
+    parseMock.return_value = [1, 2]
+    ca_connection = MagicMock()
+    response = MagicMock()
+    response.status = 200
+    ca_connection.getresponse.return_value = response
+    httpsConMock.return_value = ca_connection
+
+    # test 200
+    netutil = NetUtil.NetUtil()
+    self.assertTrue(netutil.checkURL("url"))
+
+    # test fail
+    response.status = 404
+    self.assertFalse(netutil.checkURL("url"))
+
+    # test Exception
+    response.status = 200
+    httpsConMock.side_effect = Exception("test")
+    self.assertFalse(netutil.checkURL("url"))
+
+
+  @patch("time.sleep")
+  def test_try_to_connect(self, sleepMock):
+
+    netutil = NetUtil.NetUtil()
+    checkURL = MagicMock(name="checkURL")
+    checkURL.return_value = True
+    netutil.checkURL = checkURL
+    l = MagicMock()
+
+    # one successful get
+    self.assertEqual(0, netutil.try_to_connect("url", 10))
+
+    # succeeds after a few failed attempts
+    gets = [True, False, False]
+    def side_effect(*args):
+      return gets.pop()
+    checkURL.side_effect = side_effect
+    self.assertEqual(2, netutil.try_to_connect("url", 10))
+
+    # max retries
+    checkURL.side_effect = None
+    checkURL.return_value = False
+    self.assertEqual(5, netutil.try_to_connect("url", 5))
+
+
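
test_try_to_connect above pins down the retry semantics: try_to_connect returns the number of failed checkURL() attempts before the first success, and returns max_retries if it never succeeds (test_main additionally passes -1, i.e. retry forever). A sketch of such a loop, with the sleep interval invented for illustration:

import time

def try_to_connect_sketch(check_url, server_url, max_retries, delay_seconds=10):
  # Sketch: count failed attempts until check_url() succeeds or the
  # retry budget (max_retries, -1 means unlimited) is exhausted.
  retries = 0
  while max_retries == -1 or retries < max_retries:
    if check_url(server_url):
      return retries
    retries += 1
    time.sleep(delay_seconds)
  return retries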

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestPingPortListener.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestPingPortListener.py b/ambari-agent/src/test/python/ambari_agent/TestPingPortListener.py
new file mode 100644
index 0000000..a61f7e0
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestPingPortListener.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import unittest
+from mock.mock import patch, MagicMock, call, Mock
+from ambari_agent import PingPortListener
+import socket
+import sys
+
+class TestPingPortListener(unittest.TestCase):
+
+  def setUp(self):
+    self.config = MagicMock()
+    self.config.get.return_value = 55000
+    PingPortListener.logger = MagicMock()
+
+  @patch("socket.socket")
+  def test_init_success(self,socketMock):
+    PingPortListener.logger.reset_mock()
+    alive_daemon = PingPortListener.PingPortListener(self.config)
+    self.assertFalse(PingPortListener.logger.warn.called)
+    self.assertTrue(socketMock.call_args_list[0][0][0] == socket.AF_INET)
+    self.assertTrue(socketMock.call_args_list[0][0][1] == socket.SOCK_STREAM)
+    self.assertTrue(alive_daemon.socket.bind.call_args_list[0][0][0] == ('0.0.0.0',55000))
+    self.assertTrue(alive_daemon.socket.listen.call_args_list[0][0][0] == 1)
+    self.assertTrue(alive_daemon.config.set.call_args_list[0][0][0] == 'agent')
+    self.assertTrue(alive_daemon.config.set.call_args_list[0][0][1] == 'current_ping_port')
+
+
+
+  @patch.object(socket.socket,"bind")
+  @patch.object(socket.socket,"listen")
+  @patch.object(socket.socket,"__init__")
+  @patch.object(sys, "exit")
+  def test_init_warn(self, sys_exit_mock, socketInitMock,socketListenMock,socketBindMock):
+    PingPortListener.logger.reset_mock()
+    alive_daemon = PingPortListener.PingPortListener(self.config)
+    self.assertTrue(socketInitMock.called)
+    self.assertTrue(sys_exit_mock.called)
+
+if __name__ == "__main__":
+  suite = unittest.TestLoader().loadTestsFromTestCase(TestPingPortListener)
+  unittest.TextTestRunner(verbosity=2).run(suite)
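
The assertions in test_init_success describe what the constructor must do: open an AF_INET/SOCK_STREAM socket, bind it to ('0.0.0.0', <configured port>), listen with a backlog of 1, and record the bound port under [agent] current_ping_port. A constructor sketch matching those assertions (the config option used to read the port and the accept-loop thread are assumptions, not shown in this patch):

import socket

class PingPortListenerSketch(object):
  def __init__(self, config):
    self.config = config
    # 'ping_port' as the source option is an assumption for this sketch.
    self.port = int(self.config.get('agent', 'ping_port'))
    self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.socket.bind(('0.0.0.0', self.port))
    self.socket.listen(1)
    self.config.set('agent', 'current_ping_port', str(self.port))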

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestProcessHelper.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestProcessHelper.py b/ambari-agent/src/test/python/ambari_agent/TestProcessHelper.py
new file mode 100644
index 0000000..c7a4261
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestProcessHelper.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python2.6
+# -*- coding: utf-8 -*-
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import os
+import tempfile
+import unittest
+from mock.mock import patch, MagicMock
+from ambari_agent import ProcessHelper
+
+
+class TestProcessHelper(unittest.TestCase):
+
+  @patch.object(ProcessHelper, "getTempFiles")
+  def test_clean(self, getTempFilesMock):
+
+    tf1 = tempfile.NamedTemporaryFile(delete=False)
+    tf2 = tempfile.NamedTemporaryFile(delete=False)
+    tf3 = tempfile.NamedTemporaryFile(delete=False)
+
+    getTempFilesMock.return_value = [tf2.name, tf3.name]
+    ProcessHelper.pidfile = tf1.name
+    ProcessHelper.logger = MagicMock()
+
+    ProcessHelper._clean()
+
+    self.assertFalse(os.path.exists(tf1.name))
+    self.assertFalse(os.path.exists(tf2.name))
+    self.assertFalse(os.path.exists(tf3.name))
+
+
+  @patch("os._exit")
+  @patch.object(ProcessHelper, "_clean")
+  def test_stopAgent(self, _clean_mock, exitMock):
+
+    ProcessHelper.stopAgent()
+    self.assertTrue(_clean_mock.called)
+    self.assertTrue(exitMock.called)
+
+
+  @patch("os.execvp")
+  @patch.object(ProcessHelper, "_clean")
+  def test_restartAgent(self, _clean_mock, execMock):
+
+    ProcessHelper.logger = MagicMock()
+    ProcessHelper.restartAgent()
+
+    self.assertTrue(_clean_mock.called)
+    self.assertTrue(execMock.called)
+    self.assertEqual(2, len(execMock.call_args_list[0]))
+

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestPuppetExecutor.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestPuppetExecutor.py b/ambari-agent/src/test/python/ambari_agent/TestPuppetExecutor.py
new file mode 100644
index 0000000..b2d5bce
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestPuppetExecutor.py
@@ -0,0 +1,314 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from PuppetExecutor import PuppetExecutor
+from RepoInstaller import RepoInstaller
+from Grep import Grep
+from pprint import pformat
+import socket, threading, tempfile
+import os, time
+import sys
+import json
+from AmbariConfig import AmbariConfig
+from mock.mock import patch, MagicMock, call
+from threading import Thread
+from shell import shellRunner
+import manifestGenerator
+
+class TestPuppetExecutor(TestCase):
+
+
+  def test_build(self):
+    puppetexecutor = PuppetExecutor("/tmp", "/x", "/y", "/z", AmbariConfig().getConfig())
+    command = puppetexecutor.puppetCommand("site.pp")
+    self.assertEquals("puppet", command[0], "puppet binary wrong")
+    self.assertEquals("apply", command[1], "local apply called")
+    self.assertEquals("--confdir=/tmp", command[2],"conf dir tmp")
+    self.assertEquals("--detailed-exitcodes", command[3], "make sure output \
+    correct")
+    
+  @patch.object(shellRunner,'run')
+  def test_isJavaAvailable(self, cmdrun_mock):
+    puppetInstance = PuppetExecutor("/tmp", "/x", "/y", '/tmpdir',
+                                    AmbariConfig().getConfig())
+    command = {'configurations':{'global':{'java64_home':'/usr/jdk/jdk123'}}}
+    
+    cmdrun_mock.return_value = {'exitCode': 1, 'output': 'Command not found', 'error': ''}
+    self.assertEquals(puppetInstance.isJavaAvailable(command), False)
+    
+    cmdrun_mock.return_value = {'exitCode': 0, 'output': 'OK', 'error': ''}
+    self.assertEquals(puppetInstance.isJavaAvailable(command), True)
+
+  @patch.object(manifestGenerator, 'generateManifest')
+  @patch.object(PuppetExecutor, 'isJavaAvailable')
+  @patch.object(PuppetExecutor, 'runPuppetFile')
+  def test_run_command(self, runPuppetFileMock, isJavaAvailableMock, generateManifestMock):
+    tmpdir = tempfile.gettempdir()
+    puppetInstance = PuppetExecutor("/tmp", "/x", "/y", tmpdir, AmbariConfig().getConfig())
+    jsonFile = open('../../main/python/ambari_agent/test.json', 'r')
+    jsonStr = jsonFile.read()
+    parsedJson = json.loads(jsonStr)
+    parsedJson["taskId"] = 1
+    def side_effect1(puppetFile, result, puppetEnv, tmpoutfile, tmperrfile):
+        result["exitcode"] = 0
+    runPuppetFileMock.side_effect = side_effect1
+    generateManifestMock.return_value = ''
+    puppetInstance.reposInstalled = False
+    isJavaAvailableMock.return_value = True
+    res = puppetInstance.runCommand(parsedJson, tmpdir + '/out.txt', tmpdir + '/err.txt')
+    self.assertEquals(res["exitcode"], 0)
+    self.assertTrue(puppetInstance.reposInstalled)
+
+    def side_effect2(puppetFile, result, puppetEnv, tmpoutfile, tmperrfile):
+        result["exitcode"] = 999
+    runPuppetFileMock.side_effect = side_effect2
+    puppetInstance.reposInstalled = False
+    isJavaAvailableMock.return_value = True
+    res = puppetInstance.runCommand(parsedJson, tmpdir + '/out.txt', tmpdir + '/err.txt')
+    self.assertEquals(res["exitcode"], 999)
+    self.assertFalse(puppetInstance.reposInstalled)
+
+    generateManifestMock.return_value = 'error during manifest generation'
+    res = puppetInstance.runCommand(parsedJson, tmpdir + '/out.txt', tmpdir + '/err.txt')
+    self.assertTrue(generateManifestMock.called)
+    self.assertEquals(res["exitcode"], 1)
+    generateManifestMock.return_value = ''
+
+    def side_effect3(puppetFile, result, puppetEnv, tmpoutfile, tmperrfile):
+        result["exitcode"] = 0
+    runPuppetFileMock.side_effect = side_effect3
+    puppetInstance.reposInstalled = False
+    isJavaAvailableMock.return_value = False
+    parsedJson['roleCommand'] = "START"
+    parsedJson['configurations'] = {'global':{'java64_home':'/usr/jdk/jdk123'}}
+    res = puppetInstance.runCommand(parsedJson, tmpdir + '/out.txt', tmpdir + '/err.txt')
+    
+    JAVANOTVALID_MSG = "Cannot access JDK! Make sure you have permission to execute {0}/bin/java"
+    errMsg = JAVANOTVALID_MSG.format('/usr/jdk/jdk123')
+    self.assertEquals(res["exitcode"], 1)
+    self.assertEquals(res["stderr"], errMsg)
+    self.assertFalse(puppetInstance.reposInstalled)
+
+    parsedJson['configurations'] = {'random':{'name1':'value2'}}
+    res = puppetInstance.runCommand(parsedJson, tmpdir + '/out.txt', tmpdir + '/err.txt')
+    self.assertEquals(res["exitcode"], 1)
+    self.assertEquals(res["stderr"], "Cannot access JDK! Make sure java64_home is specified in global config")
+
+
+  @patch.object(PuppetExecutor, 'isJavaAvailable')
+  @patch.object(RepoInstaller, 'generate_repo_manifests')
+  @patch.object(PuppetExecutor, 'runPuppetFile')
+  def test_overwrite_repos(self, runPuppetFileMock, generateRepoManifestMock, isJavaAvailableMock):
+    tmpdir = tempfile.gettempdir()
+    puppetInstance = PuppetExecutor("/tmp", "/x", "/y", tmpdir, AmbariConfig().getConfig())
+    jsonFile = open('../../main/python/ambari_agent/test.json', 'r')
+    jsonStr = jsonFile.read()
+    parsedJson = json.loads(jsonStr)
+    parsedJson["taskId"] = 77
+    parsedJson['roleCommand'] = "START"
+    def side_effect(puppetFile, result, puppetEnv, tmpoutfile, tmperrfile):
+      result["exitcode"] = 0
+    runPuppetFileMock.side_effect = side_effect
+    
+    isJavaAvailableMock.return_value = True
+
+    # If ambari-agent has just been started and no commands have been executed
+    # via PuppetExecutor.runCommand, then no repo files have been updated by
+    # RepoInstaller.generate_repo_manifests
+    self.assertEquals(0, generateRepoManifestMock.call_count)
+    self.assertFalse(puppetInstance.reposInstalled)
+
+    # After the first command is executed, RepoInstaller.generate_repo_manifests
+    # generates a .pp file that updates the repo files
+    puppetInstance.runCommand(parsedJson, tmpdir + '/out.txt', tmpdir + '/err.txt')
+    self.assertTrue(puppetInstance.reposInstalled)
+    self.assertEquals(1, generateRepoManifestMock.call_count)
+    isJavaAvailableMock.assert_called_with("java64_home")
+
+    # For subsequent commands, repo manifests aren't generated again
+    puppetInstance.runCommand(parsedJson, tmpdir + '/out.txt', tmpdir + '/err.txt')
+    self.assertTrue(puppetInstance.reposInstalled)
+    self.assertEquals(1, generateRepoManifestMock.call_count)
+    puppetInstance.runCommand(parsedJson, tmpdir + '/out.txt', tmpdir + '/err.txt')
+    self.assertTrue(puppetInstance.reposInstalled)
+    self.assertEquals(1, generateRepoManifestMock.call_count)
+
+  @patch("os.path.exists")
+  def test_configure_environ(self, osPathExistsMock):
+    config = AmbariConfig().getConfig()
+    tmpdir = tempfile.gettempdir()
+    puppetInstance = PuppetExecutor("/tmp", "/x", "/y", tmpdir, config)
+    environ = puppetInstance.configureEnviron({})
+    self.assertEquals(environ, {})
+
+    config.set('puppet','ruby_home',"test/ruby_home")
+    puppetInstance = PuppetExecutor("/tmp", "/x", "/y", tmpdir, config)
+    osPathExistsMock.return_value = True
+    environ = puppetInstance.configureEnviron({"PATH" : "test_path"})
+    self.assertEquals(environ["PATH"], "test/ruby_home/bin:test_path")
+    self.assertEquals(environ["MY_RUBY_HOME"], "test/ruby_home")
+
+  def test_condense_bad2(self):
+    puppetexecutor = PuppetExecutor("/tmp", "/x", "/y", "/z", AmbariConfig().getConfig())
+    grep = Grep()
+    puppetexecutor.grep = grep
+    grep.ERROR_LAST_LINES_BEFORE = 2
+    grep.ERROR_LAST_LINES_AFTER = 3
+    string_err = open('ambari_agent' + os.sep + 'dummy_puppet_output_error2.txt', 'r').read().replace("\n", os.linesep)
+    result = puppetexecutor.condenseOutput(string_err, '', 1)
+    stripped_string = string_err.strip()
+    lines = stripped_string.splitlines(True)
+    d = lines[1:6]
+    d = grep.cleanByTemplate("".join(d).strip(), "warning").splitlines(True)
+    result_check = True
+    for l in d:
+      result_check &= grep.filterMarkup(l) in result
+    self.assertEquals(result_check, True, "Failed to condense fail log")
+    self.assertEquals(('warning' in result.lower()), False, "Failed to condense fail log")
+    self.assertEquals(len(result.splitlines(True)), 5, "Failed to condense fail log")
+
+  def test_condense_bad3(self):
+    puppetexecutor = PuppetExecutor("/tmp", "/x", "/y", "/z", AmbariConfig().getConfig())
+    grep = Grep()
+    puppetexecutor.grep = grep
+    string_err = open('ambari_agent' + os.sep + 'dummy_puppet_output_error3.txt', 'r').read().replace("\n", os.linesep)
+    result = puppetexecutor.condenseOutput(string_err, '', 1)
+    stripped_string = string_err.strip()
+    lines = stripped_string.splitlines(True)
+    #sys.stderr.write(result)
+    d = lines[0:31]
+    d = grep.cleanByTemplate("".join(d).strip(), "warning").splitlines(True)
+    result_check = True
+    for l in d:
+      result_check &= grep.filterMarkup(l) in result
+    self.assertEquals(result_check, True, "Failed to condense fail log")
+    self.assertEquals(('warning' in result.lower()), False, "Failed to condense fail log")
+    self.assertEquals(len(result.splitlines(True)), 19, "Failed to condense fail log")
+
+  def test_condense_good(self):
+    puppetexecutor = PuppetExecutor("/tmp", "/x", "/y", "/z", AmbariConfig().getConfig())
+    grep = Grep()
+    puppetexecutor.grep = grep
+    grep.OUTPUT_LAST_LINES = 2
+    string_good = open('ambari_agent' + os.sep + 'dummy_puppet_output_good.txt', 'r').read().replace("\n", os.linesep)
+    result = puppetexecutor.condenseOutput(string_good, PuppetExecutor.NO_ERROR, 0)
+    stripped_string = string_good.strip()
+    lines = stripped_string.splitlines(True)
+    result_check = lines[45].strip() in result and lines[46].strip() in result
+    self.assertEquals(result_check, True, "Failed to condense output log")
+    self.assertEquals(len(result.splitlines(True)), 2, "Failed to condense output log")
+
+  @patch("shell.kill_process_with_children")
+  def test_watchdog_1(self, kill_process_with_children_mock):
+    """
+    Tests that the watchdog terminates a subprocess that exceeds its timeout
+    """
+    subproc_mock = self.Subprocess_mockup()
+    config = AmbariConfig().getConfig()
+    config.set('puppet','timeout_seconds',"0.1")
+    executor_mock = self.PuppetExecutor_mock("/home/centos/ambari_repo_info/ambari-agent/src/main/puppet/",
+      "/usr/",
+      "/root/workspace/puppet-install/facter-1.6.10/",
+      "/tmp", config, subproc_mock)
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    result = {  }
+    puppetEnv = { "RUBYLIB" : ""}
+    kill_process_with_children_mock.side_effect = lambda pid : subproc_mock.terminate()
+    subproc_mock.returncode = None
+    thread = Thread(target =  executor_mock.runPuppetFile, args = ("fake_puppetFile", result, puppetEnv, tmpoutfile, tmperrfile))
+    thread.start()
+    time.sleep(0.1)
+    subproc_mock.finished_event.wait()
+    self.assertEquals(subproc_mock.was_terminated, True, "Subprocess should be terminated due to timeout")
+
+
+  def test_watchdog_2(self):
+    """
+    Tries to catch false positive watchdog invocations
+    """
+    subproc_mock = self.Subprocess_mockup()
+    config = AmbariConfig().getConfig()
+    config.set('puppet','timeout_seconds',"5")
+    executor_mock = self.PuppetExecutor_mock("/home/centos/ambari_repo_info/ambari-agent/src/main/puppet/",
+    "/usr/",
+    "/root/workspace/puppet-install/facter-1.6.10/",
+    "/tmp", config, subproc_mock)
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    result = {  }
+    puppetEnv = { "RUBYLIB" : ""}
+    subproc_mock.returncode = 0
+    thread = Thread(target =  executor_mock.runPuppetFile, args = ("fake_puppetFile", result, puppetEnv, tmpoutfile, tmperrfile))
+    thread.start()
+    time.sleep(0.1)
+    subproc_mock.should_finish_event.set()
+    subproc_mock.finished_event.wait()
+    self.assertEquals(subproc_mock.was_terminated, False, "Subprocess should not be terminated before timeout")
+    self.assertEquals(subproc_mock.returncode, 0, "Subprocess should not be terminated before timeout")
+
+
+  class PuppetExecutor_mock(PuppetExecutor):
+
+    def __init__(self, puppetModule, puppetInstall, facterInstall, tmpDir, config, subprocess_mockup):
+      self.subprocess_mockup = subprocess_mockup
+      PuppetExecutor.__init__(self, puppetModule, puppetInstall, facterInstall, tmpDir, config)
+      pass
+
+    def lauch_puppet_subprocess(self, puppetcommand, tmpout, tmperr, puppetEnv):
+      self.subprocess_mockup.tmpout = tmpout
+      self.subprocess_mockup.tmperr = tmperr
+      return self.subprocess_mockup
+
+    def runShellKillPgrp(self, puppet):
+      puppet.terminate()  # note: In real code, subprocess.terminate() is not called
+      pass
+
+  class Subprocess_mockup():
+
+    returncode = 0
+
+    started_event = threading.Event()
+    should_finish_event = threading.Event()
+    finished_event = threading.Event()
+    was_terminated = False
+    tmpout = None
+    tmperr = None
+    pid=-1
+
+    def communicate(self):
+      self.started_event.set()
+      self.tmpout.write("Dummy output")
+      self.tmpout.flush()
+
+      self.tmperr.write("Dummy err")
+      self.tmperr.flush()
+      self.should_finish_event.wait()
+      self.finished_event.set()
+      pass
+
+    def terminate(self):
+      self.was_terminated = True
+      self.returncode = 17
+      self.should_finish_event.set()
+

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestPuppetExecutorManually.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestPuppetExecutorManually.py b/ambari-agent/src/test/python/ambari_agent/TestPuppetExecutorManually.py
new file mode 100644
index 0000000..90151b6
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestPuppetExecutorManually.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from ambari_agent.PuppetExecutor import PuppetExecutor
+from pprint import pformat
+import socket
+import os
+import sys
+import logging
+from AmbariConfig import AmbariConfig
+import tempfile
+
+FILEPATH="runme.pp"
+logger = logging.getLogger()
+
+class TestPuppetExecutor(TestCase):
+
+  def test_run(self):
+    """
+    Used to run an arbitrary puppet manifest. The test looks for the puppet manifest 'runme.pp'
+    and, if present, runs it. The test does not make any assertions.
+    """
+    if not os.path.isfile(FILEPATH):
+      return
+
+    logger.info("***** RUNNING " + FILEPATH + " *****")
+    cwd = os.getcwd()
+    puppetexecutor = PuppetExecutor(cwd, "/x", "/y", "/tmp", AmbariConfig().getConfig())
+    result = {}
+    puppetEnv = os.environ
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    result = puppetexecutor.runPuppetFile(FILEPATH, result, puppetEnv, tmpoutfile, tmperrfile)
+    logger.info("*** Puppet output: " + str(result['stdout']))
+    logger.info("*** Puppet errors: " + str(result['stderr']))
+    logger.info("*** Puppet retcode: " + str(result['exitcode']))
+    logger.info("****** DONE *****")
+
+
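
The manual test reuses the executors' temp-file plumbing: two files from tempfile.mkstemp() collect the child's stdout and stderr, which are then read back for logging. A standalone illustration of just that plumbing (assumes a POSIX 'echo'; not part of the patch):

  import os, subprocess, tempfile

  out_fd, out_path = tempfile.mkstemp()
  err_fd, err_path = tempfile.mkstemp()
  try:
      out_file = os.fdopen(out_fd, 'w')
      err_file = os.fdopen(err_fd, 'w')
      subprocess.call(['echo', 'hello'], stdout=out_file, stderr=err_file)
      out_file.close()
      err_file.close()
      print(open(out_path).read().strip())   # -> hello
  finally:
      os.remove(out_path)
      os.remove(err_path)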

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestPythonExecutor.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestPythonExecutor.py b/ambari-agent/src/test/python/ambari_agent/TestPythonExecutor.py
new file mode 100644
index 0000000..c27c0f5
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestPythonExecutor.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import pprint
+
+from unittest import TestCase
+import threading
+import tempfile
+import time
+from threading import Thread
+
+from PythonExecutor import PythonExecutor
+from AmbariConfig import AmbariConfig
+from mock.mock import MagicMock, patch
+
+
+class TestPythonExecutor(TestCase):
+
+  @patch("shell.kill_process_with_children")
+  def test_watchdog_1(self, kill_process_with_children_mock):
+    """
+    Tests that the watchdog terminates a subprocess that exceeds its timeout
+    """
+    subproc_mock = self.Subprocess_mockup()
+    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    PYTHON_TIMEOUT_SECONDS = 0.1
+    kill_process_with_children_mock.side_effect = lambda pid : subproc_mock.terminate()
+
+    def launch_python_subprocess_method(command, tmpout, tmperr):
+      subproc_mock.tmpout = tmpout
+      subproc_mock.tmperr = tmperr
+      return subproc_mock
+    executor.launch_python_subprocess = launch_python_subprocess_method
+    runShellKillPgrp_method = MagicMock()
+    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
+    executor.runShellKillPgrp = runShellKillPgrp_method
+    subproc_mock.returncode = None
+    thread = Thread(target =  executor.run_file, args = ("fake_puppetFile", ["arg1", "arg2"],
+                                                    tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS))
+    thread.start()
+    time.sleep(0.1)
+    subproc_mock.finished_event.wait()
+    self.assertEquals(subproc_mock.was_terminated, True, "Subprocess should be terminated due to timeout")
+
+
+  def test_watchdog_2(self):
+    """
+    Tries to catch false positive watchdog invocations
+    """
+    subproc_mock = self.Subprocess_mockup()
+    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    PYTHON_TIMEOUT_SECONDS =  5
+
+    def launch_python_subprocess_method(command, tmpout, tmperr):
+      subproc_mock.tmpout = tmpout
+      subproc_mock.tmperr = tmperr
+      return subproc_mock
+    executor.launch_python_subprocess = launch_python_subprocess_method
+    runShellKillPgrp_method = MagicMock()
+    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
+    executor.runShellKillPgrp = runShellKillPgrp_method
+    subproc_mock.returncode = 0
+    thread = Thread(target =  executor.run_file, args = ("fake_puppetFile", ["arg1", "arg2"],
+                                                      tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS))
+    thread.start()
+    time.sleep(0.1)
+    subproc_mock.should_finish_event.set()
+    subproc_mock.finished_event.wait()
+    self.assertEquals(subproc_mock.was_terminated, False, "Subprocess should not be terminated before timeout")
+    self.assertEquals(subproc_mock.returncode, 0, "Subprocess should not be terminated before timeout")
+
+
+  def test_execution_results(self):
+    subproc_mock = self.Subprocess_mockup()
+    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    PYTHON_TIMEOUT_SECONDS =  5
+
+    def launch_python_subprocess_method(command, tmpout, tmperr):
+      subproc_mock.tmpout = tmpout
+      subproc_mock.tmperr = tmperr
+      return subproc_mock
+    executor.launch_python_subprocess = launch_python_subprocess_method
+    runShellKillPgrp_method = MagicMock()
+    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
+    executor.runShellKillPgrp = runShellKillPgrp_method
+    subproc_mock.returncode = 0
+    subproc_mock.should_finish_event.set()
+    result = executor.run_file("file", ["arg1", "arg2"], tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS)
+    self.assertEquals(result, {'exitcode': 0, 'stderr': 'Dummy err', 'stdout': 'Dummy output'})
+
+
+  def test_is_successfull(self):
+    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
+
+    executor.python_process_has_been_killed = False
+    self.assertTrue(executor.isSuccessfull(0))
+    self.assertFalse(executor.isSuccessfull(1))
+
+    executor.python_process_has_been_killed = True
+    self.assertFalse(executor.isSuccessfull(0))
+    self.assertFalse(executor.isSuccessfull(1))
+
+
+
+  class Subprocess_mockup():
+    """
+    It's not trivial to use PyMock here instead of a class because we need
+    state and more complex logic.
+    """
+
+    returncode = 0
+
+    started_event = threading.Event()
+    should_finish_event = threading.Event()
+    finished_event = threading.Event()
+    was_terminated = False
+    tmpout = None
+    tmperr = None
+    pid=-1
+
+    def communicate(self):
+      self.started_event.set()
+      self.tmpout.write("Dummy output")
+      self.tmpout.flush()
+
+      self.tmperr.write("Dummy err")
+      self.tmperr.flush()
+      self.should_finish_event.wait()
+      self.finished_event.set()
+      pass
+
+    def terminate(self):
+      self.was_terminated = True
+      self.returncode = 17
+      self.should_finish_event.set()
+
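
test_execution_results expects run_file to fold the exit code and the two captured temp files into a single dict. A hedged sketch of that shape, using a hypothetical build_result helper rather than the actual PythonExecutor code:

  import tempfile

  def build_result(exitcode, stdout_path, stderr_path):
      # Read back whatever the child wrote to the temp files.
      return {'exitcode': exitcode,
              'stdout': open(stdout_path).read(),
              'stderr': open(stderr_path).read()}

  _, out_path = tempfile.mkstemp()
  _, err_path = tempfile.mkstemp()
  with open(out_path, 'w') as f:
      f.write('Dummy output')
  with open(err_path, 'w') as f:
      f.write('Dummy err')
  assert build_result(0, out_path, err_path) == \
         {'exitcode': 0, 'stdout': 'Dummy output', 'stderr': 'Dummy err'}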

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestRegistration.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestRegistration.py b/ambari-agent/src/test/python/ambari_agent/TestRegistration.py
new file mode 100644
index 0000000..146203f
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestRegistration.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+import os
+import tempfile
+from mock.mock import patch
+from mock.mock import MagicMock
+from ambari_agent.Register import Register
+from ambari_agent.AmbariConfig import AmbariConfig
+from ambari_agent.HostInfo import HostInfo
+
+class TestRegistration(TestCase):
+
+  @patch.object(HostInfo, 'get_os_type')
+  def test_registration_build(self, get_os_type_method):
+    config = AmbariConfig().getConfig()
+    tmpdir = tempfile.gettempdir()
+    config.set('agent', 'prefix', tmpdir)
+    config.set('agent', 'current_ping_port', '33777')
+    get_os_type_method.return_value = 'redhat'
+    ver_file = os.path.join(tmpdir, "version")
+    with open(ver_file, "w") as text_file:
+      text_file.write("1.3.0")
+
+    register = Register(config)
+    data = register.build(1)
+    #print ("Register: " + pprint.pformat(data))
+    self.assertEquals(len(data['hardwareProfile']) > 0, True, "hardwareProfile should contain content")
+    self.assertEquals(data['hostname'] != "", True, "hostname should not be empty")
+    self.assertEquals(data['publicHostname'] != "", True, "publicHostname should not be empty")
+    self.assertEquals(data['responseId'], 1)
+    self.assertEquals(data['timestamp'] > 1353678475465L, True, "timestamp should not be empty")
+    self.assertEquals(len(data['agentEnv']) > 0, True, "agentEnv should not be empty")
+    self.assertEquals(data['agentVersion'], '1.3.0', "agentVersion should not be empty")
+    print data['agentEnv']['umask']
+    self.assertNotEquals(data['agentEnv']['umask'], "", "agent's umask should not be empty")
+    self.assertEquals(data['currentPingPort'] == 33777, True, "current ping port should be 33777")
+    self.assertEquals(len(data), 8)
+
+    os.remove(ver_file)
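
test_registration_build points the agent prefix at a directory containing a 'version' file and expects agentVersion to come back as '1.3.0'. A small sketch of that lookup, with a hypothetical read_agent_version helper (the Register implementation itself is not part of this patch):

  import os, tempfile

  def read_agent_version(prefix):
      # Return the contents of <prefix>/version, or '' if the file is missing.
      ver_file = os.path.join(prefix, 'version')
      if not os.path.isfile(ver_file):
          return ''
      with open(ver_file) as f:
          return f.read().strip()

  tmpdir = tempfile.mkdtemp()
  with open(os.path.join(tmpdir, 'version'), 'w') as f:
      f.write('1.3.0')
  assert read_agent_version(tmpdir) == '1.3.0'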

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestRepoInstaller.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestRepoInstaller.py b/ambari-agent/src/test/python/ambari_agent/TestRepoInstaller.py
new file mode 100644
index 0000000..2628cf7
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestRepoInstaller.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from ambari_agent.RepoInstaller import RepoInstaller
+import tempfile
+import json, os
+import shutil
+from ambari_agent.AmbariConfig import AmbariConfig
+from mock.mock import patch, MagicMock, call
+
+class TestRepoInstaller(TestCase):
+
+  def setUp(self):
+    self.dir = tempfile.mkdtemp()
+    jsonCommand = file('../../main/python/ambari_agent/test.json').read()
+    self.parsedJson= json.loads(jsonCommand)
+    self.config = AmbariConfig().getConfig()
+    self.repoInstaller = RepoInstaller(self.parsedJson, self.dir, '../../main/puppet/modules', 1, self.config)
+
+    pass
+
+  def tearDown(self):
+    shutil.rmtree(self.dir)
+    pass
+
+
+  def test_prepare_repos_info(self):
+    localParsedJson = json.loads('{"hostLevelParams" : {"repo_info" : {"test" : "test"}}}')
+    localRepoInstaller = RepoInstaller(localParsedJson, self.dir, '../../main/puppet/modules', 1, self.config)
+    localRepoInstaller.prepareReposInfo()
+    self.assertEquals(localRepoInstaller.repoInfoList['test'], "test")
+
+    localParsedJson = json.loads('{"hostLevelParams" : {"repo_info" : "1"}}')
+    localRepoInstaller = RepoInstaller(localParsedJson, self.dir, '../../main/puppet/modules', 1, self.config)
+    localRepoInstaller.prepareReposInfo()
+    self.assertEquals(localRepoInstaller.repoInfoList, 1)
+
+    localParsedJson = json.loads('{"hostLevelParams" : {"repo_info" : ""}}')
+    localRepoInstaller = RepoInstaller(localParsedJson, self.dir, '../../main/puppet/modules', 1, self.config)
+    localRepoInstaller.prepareReposInfo()
+    self.assertEquals(localRepoInstaller.repoInfoList, [])
+
+
+  def test_generate_files(self):
+    localParsedJson = json.loads('{"hostLevelParams": { "repo_info" : [{"baseUrl":"http://public-repo-1.hortonworks.com/HDP-1.1.1.16/repos/centos5"\
+           ,"osType":"centos5","repoId":"HDP-1.1.1.16_TEST","repoName":"HDP_TEST", "mirrorsList":"http://mirrors.fedoraproject.org/mirrorlist"}]}}')
+    localRepoInstaller = RepoInstaller(localParsedJson, self.dir, '../../main/puppet/modules', 1, self.config)
+    localRepoInstaller.prepareReposInfo()
+    result = localRepoInstaller.generateFiles()
+    self.assertTrue(result[0].endswith("HDP-1.1.1.16_TEST-1.pp"))
+
+  @patch.object(RepoInstaller, 'prepareReposInfo')
+  @patch.object(RepoInstaller, 'generateFiles')
+  def testInstallRepos(self, generateFilesMock, prepareReposInfoMock):
+    result = self.repoInstaller.generate_repo_manifests()
+    self.assertTrue(prepareReposInfoMock.called)
+    self.assertTrue(generateFilesMock.called)
+    print('generate_repo_manifests result: ' + str(result))
+    pass
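
test_prepare_repos_info pins down three shapes of hostLevelParams/repo_info. A sketch of the behaviour those assertions describe, written as a hypothetical prepare_repos_info function rather than the RepoInstaller method itself:

  import json

  def prepare_repos_info(command):
      info = command['hostLevelParams']['repo_info']
      if isinstance(info, basestring):
          # Non-empty strings are decoded as JSON; empty ones become [].
          return json.loads(info) if info else []
      return info  # dicts and lists are used as-is

  assert prepare_repos_info({'hostLevelParams': {'repo_info': {'test': 'test'}}}) == {'test': 'test'}
  assert prepare_repos_info({'hostLevelParams': {'repo_info': '1'}}) == 1
  assert prepare_repos_info({'hostLevelParams': {'repo_info': ''}}) == []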

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestScript.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestScript.py b/ambari-agent/src/test/python/ambari_agent/TestScript.py
new file mode 100644
index 0000000..e4fc24c
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestScript.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import ConfigParser
+import os
+
+import pprint
+
+from unittest import TestCase
+import threading
+import tempfile
+import time
+from threading import Thread
+
+
+import StringIO
+import sys, logging
+from ambari_agent import AgentException
+from resource_management.libraries.script import Script
+from resource_management.core.environment import Environment
+from mock.mock import MagicMock, patch
+
+class TestScript(TestCase):
+
+  def setUp(self):
+    # disable stdout
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+
+
+  @patch("resource_management.core.providers.package.PackageProvider")
+  def test_install_packages(self, package_provider_mock):
+    no_such_entry_config = {
+    }
+    empty_config = {
+      'hostLevelParams' : {
+        'package_list' : ''
+      }
+    }
+    dummy_config = {
+      'hostLevelParams' : {
+        'package_list' : "[{\"type\":\"rpm\",\"name\":\"hbase\"},"
+                         "{\"type\":\"rpm\",\"name\":\"yet-another-package\"}]"
+      }
+    }
+
+    # Testing config without any keys
+    with Environment(".") as env:
+      script = Script()
+      Script.config = no_such_entry_config
+      script.install_packages(env)
+    self.assertEquals(len(env.resource_list), 0)
+
+    # Testing empty package list
+    with Environment(".") as env:
+      script = Script()
+      Script.config = empty_config
+      script.install_packages(env)
+    self.assertEquals(len(env.resource_list), 0)
+
+    # Testing installation of a list of packages
+    with Environment(".") as env:
+      Script.config = dummy_config
+      script.install_packages(env)
+    resource_dump = pprint.pformat(env.resource_list)
+    self.assertEqual(resource_dump, "[Package['hbase'], Package['yet-another-package']]")
+
+
+  def tearDown(self):
+    # enable stdout
+    sys.stdout = sys.__stdout__
+
+
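
setUp/tearDown silence stdout for the duration of each test and then restore it. The same idiom in isolation, standard library only:

  import StringIO, sys

  captured = StringIO.StringIO()
  sys.stdout = captured                # silence anything printed by the code under test
  try:
      print('hidden from the console')
  finally:
      sys.stdout = sys.__stdout__      # always restore the real stdout
  assert 'hidden' in captured.getvalue()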

