incubator-ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dmitriu...@apache.org
Subject [4/7] AMBARI-3810. Unittests for File resource and all its attributes (Eugene Chekanskiy via dlysnichenko)
Date Tue, 19 Nov 2013 18:17:18 GMT
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestCheckWebUI.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCheckWebUI.py b/ambari-agent/src/test/python/ambari_agent/TestCheckWebUI.py
new file mode 100644
index 0000000..c7d4ac4
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestCheckWebUI.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import unittest
+import StringIO
+import sys
+
+from mock.mock import MagicMock, patch
+
+import checkWebUI
+
class TestMain(unittest.TestCase):
  """Tests for the checkWebUI command-line script."""

  def setUp(self):
    # Redirect stdout to a buffer so the script's printing does not
    # pollute the test runner's console.
    out = StringIO.StringIO()
    sys.stdout = out


  def tearDown(self):
    # Restore the real stdout.
    sys.stdout = sys.__stdout__

  @patch("optparse.OptionParser.parse_args")
  @patch('httplib.HTTPConnection')
  def test_check_web_ui(self, http_mock, parse_args_mock):
    """main() must probe every host over HTTP and exit(1) on a non-200."""
    # Positive scenario: every host answers 200, main() returns normally.
    options = MagicMock()
    options.hosts = 'host1,host2'
    options.port = '10000'
    # NOTE(fix): pass a MagicMock *instance* for the args element, not the class.
    parse_args_mock.return_value = (options, MagicMock())
    http_conn = http_mock.return_value
    http_conn.getresponse.return_value = MagicMock(status=200)

    checkWebUI.main()

    self.assertTrue(http_conn.request.called)
    self.assertTrue(http_conn.getresponse.called)
    self.assertTrue(http_conn.close.called)

    # Negative scenario: a host answers 404, main() must exit with code 1.
    options = MagicMock()
    options.hosts = 'host1,host2'
    options.port = '10000'
    parse_args_mock.return_value = (options, MagicMock())
    http_conn.getresponse.return_value = MagicMock(status=404)

    try:
      checkWebUI.main()
      # BUG FIX: previously, if main() did NOT raise SystemExit the test
      # passed silently. Fail explicitly when the expected exit is missing.
      self.fail("main() should raise SystemExit when a web UI check fails")
    except SystemExit as e:
      self.assertEqual(e.code, 1)

    self.assertTrue(http_conn.request.called)
    self.assertTrue(http_conn.getresponse.called)
    self.assertTrue(http_conn.close.called)

if __name__ == "__main__":
  unittest.main()

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py b/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py
new file mode 100644
index 0000000..07182ad
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import tempfile
+from unittest import TestCase
+from ambari_agent.CommandStatusDict import CommandStatusDict
+import os
+import logging
+import json, pprint
+from mock.mock import patch, MagicMock, call
+
class TestCommandStatusDict(TestCase):
  """Tests for CommandStatusDict report accumulation and generation."""

  logger = logging.getLogger()

  def test_put_and_generate(self):
    """Every stored report (failed, complete, in-progress, status) must
    appear in generate_report() output, and the stdout/stderr of
    in-progress execution commands must be trimmed to '...'."""
    callback_mock = MagicMock()
    commandStatuses = CommandStatusDict(callback_action = callback_mock)
    command_in_progress1 = {
      'commandType': 'EXECUTION_COMMAND',
      'commandId': '1-1',
      'clusterName': u'cc',
      'exitCode': 777,
      'role': u'DATANODE',
      'roleCommand': u'INSTALL',
      'serviceName': u'HDFS',
      'stderr': '',
      'stdout': "notice: /Stage[1]/Hdp::Iptables/Service[iptables]/ensure: ensure changed 'running' to 'stopped'\nnotice: /Stage[1]/Hdp/File[/tmp/changeUid.sh]/ensure: defined content as '{md5}32b994a2e970f8acc3c91c198b484654'\nnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Package[snappy]/Hdp::Package::Process_pkg[snappy]/Package[snappy]/ensure: created\nnotice: /Stage[1]/Hdp/Hdp::Group[nagios_group]/Group[nagios_group]/ensure: created\nnotice: /Stage[1]/Hdp/Hdp::User[nagios_user]/User[nagios]/ensure: created\nnotice: /Stage[1]/Hdp::Snmp/Hdp::Package[snmp]/Hdp::Package::Process_pkg[snmp]/Package[net-snmp-utils]/ensure: created",
      'taskId': 5
    }
    command_in_progress1_report = {
      'status': 'IN_PROGRESS',
      'taskId': 5
    }
    command_in_progress2 = {
      'commandType': 'EXECUTION_COMMAND',
      'commandId': '1-1',
      'role': u'DATANODE',
      'roleCommand': u'INSTALL',
      'taskId': 6,
      'clusterName': u'cc',
      'serviceName': u'HDFS',
    }
    command_in_progress2_report = {
      'status': 'IN_PROGRESS',
      'taskId': 6
    }
    finished_command = {
      'commandType': 'EXECUTION_COMMAND',
      'role': u'DATANODE',
      'roleCommand': u'INSTALL',
      'commandId': '1-1',
      'taskId': 4,
      'clusterName': u'cc',
      'serviceName': u'HDFS',
    }
    finished_command_report = {
      'status': 'COMPLETE',
      'taskId': 4,
    }
    failed_command = {
      'commandType': 'EXECUTION_COMMAND',
      'role': u'DATANODE',
      'roleCommand': u'INSTALL',
      'commandId': '1-1',
      'taskId': 3,
      'clusterName': u'cc',
      'serviceName': u'HDFS',
    }
    failed_command_report = {
      'status': 'FAILED',
      'taskId': 3,
    }
    status_command = {
      'componentName': 'DATANODE',
      'commandType': 'STATUS_COMMAND',
    }
    status_command_report = {
      'componentName': 'DATANODE',
      'status': 'HEALTHY'
    }
    commandStatuses.put_command_status(command_in_progress1, command_in_progress1_report)
    commandStatuses.put_command_status(command_in_progress2, command_in_progress2_report)
    commandStatuses.put_command_status(finished_command, finished_command_report)
    commandStatuses.put_command_status(failed_command, failed_command_report)
    commandStatuses.put_command_status(status_command, status_command_report)
    report = commandStatuses.generate_report()
    expected = \
      {'componentStatus': [{'status': 'HEALTHY', 'componentName': 'DATANODE'}],
       'reports': [{'status': 'FAILED', 'taskId': 3},
                   {'status': 'COMPLETE', 'taskId': 4},
                   {'status': 'IN_PROGRESS', 'stderr': '...',
                    'stdout': '...', 'clusterName': u'cc',
                    'roleCommand': u'INSTALL', 'serviceName': u'HDFS',
                    'role': u'DATANODE', 'actionId': '1-1', 'taskId': 5,
                    'exitCode': 777},
                   {'status': 'IN_PROGRESS',
                    'stderr': '...',
                    'stdout': '...',
                    'clusterName': u'cc',
                    'roleCommand': u'INSTALL',
                    'serviceName': u'HDFS',
                    'role': u'DATANODE',
                    'actionId': '1-1',
                    'taskId': 6,
                    'exitCode': 777}]
      }
    # FIX: assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(report, expected)
+

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestController.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestController.py b/ambari-agent/src/test/python/ambari_agent/TestController.py
new file mode 100644
index 0000000..87e00fe
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestController.py
@@ -0,0 +1,408 @@
+#!/usr/bin/env python2.6
+# -*- coding: utf-8 -*-
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import StringIO
+import ssl
+import unittest, threading
+from ambari_agent import Controller, ActionQueue
+from ambari_agent import hostname
+import sys
+from ambari_agent.Controller import AGENT_AUTO_RESTART_EXIT_CODE
+from mock.mock import patch, MagicMock, call, Mock
+import logging
+from threading import Event
+
class TestController(unittest.TestCase):
  """Unit tests for ambari_agent.Controller: registration, heartbeat loop,
  agent restart and server communication."""

  logger = logging.getLogger()

  @patch("threading.Thread")
  @patch("threading.Lock")
  @patch.object(Controller, "NetUtil")
  @patch.object(hostname, "hostname")
  def setUp(self, hostname_method, NetUtil_mock, lockMock, threadMock):
    # Build a Controller instance with every external collaborator mocked.
    Controller.logger = MagicMock()
    lockMock.return_value = MagicMock()
    NetUtil_mock.return_value = MagicMock()
    hostname_method.return_value = "test_hostname"

    config = MagicMock()
    config.get.return_value = "something"

    self.controller = Controller.Controller(config)
    # Shrink heartbeat intervals so tests do not sleep on real timeouts.
    self.controller.netutil.MINIMUM_INTERVAL_BETWEEN_HEARTBEATS = 0.1
    self.controller.netutil.HEARTBEAT_NOT_IDDLE_INTERVAL_SEC = 0.1


  @patch("json.dumps")
  @patch("time.sleep")
  @patch("pprint.pformat")
  @patch.object(Controller, "randint")
  def test_registerWithServer(self, randintMock, pformatMock, sleepMock,
                              dumpsMock):
    """registerWithServer() must parse server responses, queue any status
    commands it receives, and retry after transient failures."""
    out = StringIO.StringIO()
    sys.stdout = out

    register = MagicMock()
    self.controller.register = register

    self.controller.sendRequest = MagicMock()

    dumpsMock.return_value = "request"
    # Server reports a registration error.
    self.controller.sendRequest.return_value = '{"log":"Error text", "exitstatus":"1"}'

    self.assertEqual({u'exitstatus': u'1', u'log': u'Error text'}, self.controller.registerWithServer())

    # Plain successful registration.
    self.controller.sendRequest.return_value = '{"responseId":1}'
    self.assertEqual({"responseId":1}, self.controller.registerWithServer())

    # Registration carrying status commands: they must be queued.
    self.controller.sendRequest.return_value = '{"responseId":1, "statusCommands": "commands", "log":"", "exitstatus":"0"}'
    self.controller.addToQueue = MagicMock(name="addToQueue")
    self.controller.isRegistered = False
    self.assertEqual({'exitstatus': '0', 'responseId': 1, 'log': '', 'statusCommands': 'commands'}, self.controller.registerWithServer())
    self.controller.addToQueue.assert_called_with("commands")

    calls = []

    def side_effect(*args):
      # Fail exactly once, then succeed, to exercise the retry path.
      if len(calls) == 0:
        calls.append(1)
        raise Exception("test")
      return "request"

    self.controller.sendRequest.return_value = '{"responseId":1}'

    dumpsMock.side_effect = side_effect
    self.controller.isRegistered = False
    self.assertEqual({"responseId":1}, self.controller.registerWithServer())
    self.assertTrue(randintMock.called)
    self.assertTrue(sleepMock.called)

    sys.stdout = sys.__stdout__

    # Restore the real (unbound) methods for subsequent tests.
    self.controller.sendRequest = Controller.Controller.sendRequest
    self.controller.addToQueue = Controller.Controller.addToQueue


  @patch("pprint.pformat")
  def test_addToQueue(self, pformatMock):
    """None must not be queued; a real command must be."""
    actionQueue = MagicMock()
    self.controller.actionQueue = actionQueue
    self.controller.addToQueue(None)
    self.assertFalse(actionQueue.put.called)
    self.controller.addToQueue("cmd")
    self.assertTrue(actionQueue.put.called)


  @patch("urllib2.build_opener")
  @patch("urllib2.install_opener")
  @patch.object(Controller, "ActionQueue")
  def test_run(self, ActionQueue_mock, installMock, buildMock):
    """run() must install the url opener, start the action queue and loop
    on registerAndHeartbeat while repeatRegistration stays set."""
    aq = MagicMock()
    ActionQueue_mock.return_value = aq

    buildMock.return_value = "opener"
    registerAndHeartbeat  = MagicMock("registerAndHeartbeat")
    calls = []
    def side_effect():
      # First invocation re-arms repeatRegistration to force a second loop.
      if len(calls) == 0:
        self.controller.repeatRegistration = True
      calls.append(1)
    registerAndHeartbeat.side_effect = side_effect
    self.controller.registerAndHeartbeat = registerAndHeartbeat

    # repeat registration
    self.controller.run()

    self.assertTrue(buildMock.called)
    # BUG FIX: 'installMock.called_once_with("opener")' was a no-op
    # attribute access on a MagicMock and asserted nothing.
    self.assertTrue(installMock.called)
    self.assertEqual(2, registerAndHeartbeat.call_count)

    # one call, +1
    registerAndHeartbeat.side_effect = None
    self.controller.run()
    self.assertEqual(3, registerAndHeartbeat.call_count)

    # Action queue should be started during calls
    self.assertTrue(ActionQueue_mock.called)
    self.assertTrue(aq.start.called)


  @patch("urllib2.build_opener")
  @patch("urllib2.install_opener")
  @patch.object(ActionQueue.ActionQueue, "run")
  def test_repeatRegistration(self,
                              run_mock, installMock, buildMock):
    """The registration loop must stop once repeatRegistration is cleared."""
    registerAndHeartbeat = MagicMock(name="registerAndHeartbeat")

    self.controller.registerAndHeartbeat = registerAndHeartbeat
    self.controller.run()
    self.assertTrue(installMock.called)
    self.assertTrue(buildMock.called)
    self.controller.registerAndHeartbeat.assert_called_once_with()

    calls = []
    def switchBool():
      # Re-arm the flag on the first call only, then clear it.
      if len(calls) == 0:
        self.controller.repeatRegistration = True
        calls.append(1)
      self.controller.repeatRegistration = False

    registerAndHeartbeat.side_effect = switchBool
    self.controller.run()
    self.assertEqual(2, registerAndHeartbeat.call_count)

    self.controller.registerAndHeartbeat = \
      Controller.Controller.registerAndHeartbeat


  @patch("time.sleep")
  def test_registerAndHeartbeatWithException(self, sleepMock):
    """An exception inside sendRequest must not prevent the register and
    heartbeat calls from happening."""
    registerWithServer = MagicMock(name="registerWithServer")
    registerWithServer.return_value = {"response":"resp"}
    self.controller.registerWithServer = registerWithServer
    heartbeatWithServer = MagicMock(name="heartbeatWithServer")
    self.controller.heartbeatWithServer = heartbeatWithServer

    Controller.Controller.__sendRequest__ = MagicMock(side_effect=Exception())

    self.controller.isRegistered = True
    self.controller.registerAndHeartbeat()
    registerWithServer.assert_called_once_with()
    heartbeatWithServer.assert_called_once_with()

    self.controller.registerWithServer =\
    Controller.Controller.registerWithServer
    # BUG FIX: heartbeatWithServer was previously restored to
    # registerWithServer (copy/paste error), leaving the wrong method bound
    # for any later test.
    self.controller.heartbeatWithServer =\
    Controller.Controller.heartbeatWithServer

  @patch("time.sleep")
  def test_registerAndHeartbeat(self, sleepMock):
    """Happy path: register and heartbeat are each invoked exactly once."""
    registerWithServer = MagicMock(name="registerWithServer")
    registerWithServer.return_value = {"response":"resp"}
    self.controller.registerWithServer = registerWithServer
    heartbeatWithServer = MagicMock(name="heartbeatWithServer")
    self.controller.heartbeatWithServer = heartbeatWithServer

    self.controller.isRegistered = True
    self.controller.registerAndHeartbeat()
    registerWithServer.assert_called_once_with()
    heartbeatWithServer.assert_called_once_with()

    self.controller.registerWithServer = \
      Controller.Controller.registerWithServer
    # BUG FIX: restore the real heartbeatWithServer, not registerWithServer.
    self.controller.heartbeatWithServer = \
      Controller.Controller.heartbeatWithServer


  @patch("os._exit")
  def test_restartAgent(self, os_exit_mock):
    """restartAgent() exits the process with the auto-restart exit code."""
    self.controller.restartAgent()
    self.assertTrue(os_exit_mock.called)
    self.assertTrue(os_exit_mock.call_args[0][0] == AGENT_AUTO_RESTART_EXIT_CODE)


  @patch("urllib2.Request")
  @patch.object(Controller, "security")
  def test_sendRequest(self, security_mock, requestMock):
    """sendRequest() lazily creates a cached HTTPS connection and sends
    the request through it."""
    conMock = MagicMock()
    conMock.request.return_value = "response"
    security_mock.CachedHTTPSConnection.return_value = conMock
    url = "url"
    data = "data"
    requestMock.return_value = "request"

    self.controller.cachedconnect = None

    self.assertEqual("response", self.controller.sendRequest(url, data))
    security_mock.CachedHTTPSConnection.assert_called_once_with(
      self.controller.config)
    # BUG FIX: 'requestMock.called_once_with(...)' was a no-op attribute
    # access; assert the Request object really was constructed.
    self.assertTrue(requestMock.called)


  @patch.object(threading._Event, "wait")
  @patch("time.sleep")
  @patch("json.loads")
  @patch("json.dumps")
  def test_heartbeatWithServer(self, dumpsMock, loadsMock, sleepMock, event_mock):
    """Exercises the heartbeat loop: retry on error, re-registration,
    component-mapping flags, agent restart, and command queueing."""
    out = StringIO.StringIO()
    sys.stdout = out

    heartbeat = MagicMock()
    self.controller.heartbeat = heartbeat

    dumpsMock.return_value = "data"

    sendRequest = MagicMock(name="sendRequest")
    self.controller.sendRequest = sendRequest

    self.controller.responseId = 1
    response = {"responseId":"2", "restartAgent":"false"}
    loadsMock.return_value = response

    def one_heartbeat(*args, **kwargs):
      # Stop the loop after a single successful heartbeat.
      self.controller.DEBUG_STOP_HEARTBEATING = True
      return "data"

    sendRequest.side_effect = one_heartbeat

    actionQueue = MagicMock()
    actionQueue.isIdle.return_value = True

    # one successful request, after stop
    self.controller.actionQueue = actionQueue
    self.controller.heartbeatWithServer()
    self.assertTrue(sendRequest.called)

    calls = []
    def retry(*args, **kwargs):
      # First call raises; the next one succeeds and stops the loop.
      if len(calls) == 0:
        calls.append(1)
        response["responseId"] = "3"
        raise Exception()
      if len(calls) > 0:
        self.controller.DEBUG_STOP_HEARTBEATING = True
      return "data"

    # exception, retry, successful and stop
    sendRequest.side_effect = retry
    self.controller.DEBUG_STOP_HEARTBEATING = False
    self.controller.heartbeatWithServer()

    self.assertEqual(1, self.controller.DEBUG_SUCCESSFULL_HEARTBEATS)

    # retry registration
    response["registrationCommand"] = "true"
    sendRequest.side_effect = one_heartbeat
    self.controller.DEBUG_STOP_HEARTBEATING = False
    self.controller.heartbeatWithServer()

    self.assertTrue(self.controller.repeatRegistration)

    # components are not mapped
    response["registrationCommand"] = "false"
    response["hasMappedComponents"] = False
    sendRequest.side_effect = one_heartbeat
    self.controller.DEBUG_STOP_HEARTBEATING = False
    self.controller.heartbeatWithServer()

    self.assertFalse(self.controller.hasMappedComponents)

    # components are mapped
    response["hasMappedComponents"] = True
    sendRequest.side_effect = one_heartbeat
    self.controller.DEBUG_STOP_HEARTBEATING = False
    self.controller.heartbeatWithServer()

    self.assertTrue(self.controller.hasMappedComponents)

    # the flag must keep its value when the field is absent
    del response["hasMappedComponents"]
    sendRequest.side_effect = one_heartbeat
    self.controller.DEBUG_STOP_HEARTBEATING = False
    self.controller.heartbeatWithServer()

    self.assertTrue(self.controller.hasMappedComponents)

    # wrong responseId => restart
    response = {"responseId":"2", "restartAgent":"false"}
    loadsMock.return_value = response

    restartAgent = MagicMock(name="restartAgent")
    self.controller.restartAgent = restartAgent
    self.controller.DEBUG_STOP_HEARTBEATING = False
    self.controller.heartbeatWithServer()

    restartAgent.assert_called_once_with()

    # executionCommands, statusCommands
    self.controller.responseId = 1
    addToQueue = MagicMock(name="addToQueue")
    self.controller.addToQueue = addToQueue
    response["executionCommands"] = "executionCommands"
    response["statusCommands"] = "statusCommands"
    self.controller.DEBUG_STOP_HEARTBEATING = False
    self.controller.heartbeatWithServer()

    addToQueue.assert_has_calls([call("executionCommands"),
                                 call("statusCommands")])

    # restartAgent command
    self.controller.responseId = 1
    self.controller.DEBUG_STOP_HEARTBEATING = False
    response["restartAgent"] = "true"
    restartAgent = MagicMock(name="restartAgent")
    self.controller.restartAgent = restartAgent
    self.controller.heartbeatWithServer()

    restartAgent.assert_called_once_with()

    # actionQueue not idle
    self.controller.responseId = 1
    self.controller.DEBUG_STOP_HEARTBEATING = False
    actionQueue.isIdle.return_value = False
    response["restartAgent"] = "false"
    self.controller.heartbeatWithServer()

    sleepMock.assert_called_with(
      self.controller.netutil.MINIMUM_INTERVAL_BETWEEN_HEARTBEATS)

    sys.stdout = sys.__stdout__
    self.controller.sendRequest = Controller.Controller.sendRequest
    # BUG FIX: the second restore assigned Controller.Controller.addToQueue
    # to sendRequest, clobbering the line above and leaving addToQueue
    # permanently mocked. Restore addToQueue instead.
    self.controller.addToQueue = Controller.Controller.addToQueue

  @patch("pprint.pformat")
  @patch("time.sleep")
  @patch("json.loads")
  @patch("json.dumps")
  def test_certSigningFailed(self, dumpsMock, loadsMock, sleepMock, pformatMock):
    """An SSL failure during registration must clear repeatRegistration so
    the controller thread and the agent stop instead of looping."""
    register = MagicMock()
    self.controller.register = register

    dumpsMock.return_value = "request"
    response = {"responseId":1,}
    loadsMock.return_value = response

    self.controller.sendRequest = Mock(side_effect=ssl.SSLError())

    self.controller.repeatRegistration=True
    self.controller.registerWithServer()

    # Controller thread and the agent stop if the repeatRegistration flag is False
    self.assertFalse(self.controller.repeatRegistration)

if __name__ == "__main__":
  unittest.main(verbosity=2)
+
+
+
+

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
new file mode 100644
index 0000000..b9aab2a
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import ConfigParser
+import os
+
+import pprint
+
+from unittest import TestCase
+import threading
+import tempfile
+import time
+from threading import Thread
+
+from PythonExecutor import PythonExecutor
+from CustomServiceOrchestrator import CustomServiceOrchestrator
+from AmbariConfig import AmbariConfig
+from mock.mock import MagicMock, patch
+import StringIO
+import sys
+from AgentException import AgentException
+from FileCache import FileCache
+
+
class TestCustomServiceOrchestrator(TestCase):
  """Tests for CustomServiceOrchestrator: command serialization, script
  path resolution and command execution dispatch."""

  def setUp(self):
    # Disable stdout so output from the code under test is swallowed.
    out = StringIO.StringIO()
    sys.stdout = out
    # Generate a minimal sample agent config rooted at the temp directory.
    tmpdir = tempfile.gettempdir()
    self.config = ConfigParser.RawConfigParser()
    self.config.add_section('agent')
    self.config.set('agent', 'prefix', tmpdir)
    self.config.set('agent', 'cache_dir', "/cachedir")


  def test_dump_command_to_json(self):
    """The dumped json file must exist, be non-empty and be owner-only
    readable (mode 0600)."""
    command = {
      'commandType': 'EXECUTION_COMMAND',
      'role': u'DATANODE',
      'roleCommand': u'INSTALL',
      'commandId': '1-1',
      'taskId': 3,
      'clusterName': u'cc',
      'serviceName': u'HDFS',
      'configurations':{'global' : {}},
      'configurationTags':{'global' : { 'tag': 'v1' }}
    }
    config = AmbariConfig().getConfig()
    tempdir = tempfile.gettempdir()
    config.set('agent', 'prefix', tempdir)
    orchestrator = CustomServiceOrchestrator(config)
    # FIX: renamed local from 'file' to avoid shadowing the builtin.
    json_file = orchestrator.dump_command_to_json(command)
    self.assertTrue(os.path.exists(json_file))
    self.assertTrue(os.path.getsize(json_file) > 0)
    # Command json may contain sensitive data, hence the 0600 check.
    # FIX: 0o777 octal literal (valid on 2.6+) instead of the Py2-only 0777.
    self.assertEqual(oct(os.stat(json_file).st_mode & 0o777), '0600')
    os.unlink(json_file)


  @patch("os.path.exists")
  def test_resolve_script_path(self, exists_mock):
    """Existing scripts resolve to a full package path; missing ones raise
    AgentException."""
    config = AmbariConfig().getConfig()
    orchestrator = CustomServiceOrchestrator(config)
    # Testing existing path
    exists_mock.return_value = True
    path = orchestrator.\
      resolve_script_path("/HBASE", "scripts/hbase_master.py", "PYTHON")
    self.assertEqual("/HBASE/package/scripts/hbase_master.py", path)
    # Testing not existing path
    exists_mock.return_value = False
    try:
      orchestrator.resolve_script_path("/HBASE",
                                       "scripts/hbase_master.py", "PYTHON")
      self.fail('ExpectedException not thrown')
    except AgentException:
      pass # Expected


  @patch.object(CustomServiceOrchestrator, "resolve_script_path")
  @patch.object(FileCache, "get_service_base_dir")
  @patch.object(CustomServiceOrchestrator, "dump_command_to_json")
  @patch.object(PythonExecutor, "run_file")
  def test_runCommand(self, run_file_mock, dump_command_to_json_mock,
                      get_service_base_dir_mock, resolve_script_path_mock):
    """PYTHON commands are executed through PythonExecutor; unknown script
    types fail fast with exitcode 1 and never run anything."""
    command = {
      'role' : 'REGION_SERVER',
      'hostLevelParams' : {
        'stack_name' : 'HDP',
        'stack_version' : '2.0.7',
      },
      'commandParams': {
        'script_type': 'PYTHON',
        'script': 'scripts/hbase_regionserver.py',
        'command_timeout': '600',
        'service_metadata_folder' : 'HBASE'
      },
      'roleCommand': 'INSTALL'
    }
    get_service_base_dir_mock.return_value = "/basedir/"
    resolve_script_path_mock.return_value = "/basedir/scriptpath"
    orchestrator = CustomServiceOrchestrator(self.config)
    # normal run case
    run_file_mock.return_value = {
        'stdout' : 'sss',
        'stderr' : 'eee',
        'exitcode': 0,
      }
    ret = orchestrator.runCommand(command, "out.txt", "err.txt")
    self.assertEqual(ret['exitcode'], 0)
    self.assertTrue(run_file_mock.called)

    run_file_mock.reset_mock()
    # unknown script type case
    command['commandParams']['script_type'] = "PUPPET"
    ret = orchestrator.runCommand(command, "out.txt", "err.txt")
    self.assertEqual(ret['exitcode'], 1)
    self.assertFalse(run_file_mock.called)
    self.assertTrue("Unknown script type" in ret['stdout'])


  def tearDown(self):
    # Re-enable stdout.
    sys.stdout = sys.__stdout__
+
+

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py b/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py
new file mode 100644
index 0000000..2f2a8bc
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python2.6
+# -*- coding: utf-8 -*-
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import unittest
+from mock.mock import patch, MagicMock, call, Mock
+from ambari_agent import DataCleaner
+
+
class TestDataCleaner(unittest.TestCase):
  """Tests for the DataCleaner background file-purging component."""

  def setUp(self):
    # Fake directory listing returned by the patched os.walk:
    # one directory with two commands' artifacts plus a 'version' file.
    self.test_dir = [('/test_path', [],
                      ['errors-12.txt','output-12.txt','site-12.pp','site-13.pp','site-15.pp','version'])]
    self.config = MagicMock()
    # Values consumed by DataCleaner in order:
    # file max age, purge interval, data directory.
    self.config.get.side_effect = [2592000,3600 + 1,"/test_path"]
    DataCleaner.logger = MagicMock()

  def test_init_success(self):
    """A sane max age must not produce a warning."""
    config = MagicMock()
    config.get.return_value = 2592000
    DataCleaner.logger.reset_mock()
    # FIX: constructor invoked for its side effect; unused local removed.
    DataCleaner.DataCleaner(config)
    self.assertFalse(DataCleaner.logger.warn.called)


  def test_init_warn(self):
    """A too-small max age is warned about and clamped to one hour."""
    config = MagicMock()
    config.get.return_value = 10
    DataCleaner.logger.reset_mock()
    cleaner = DataCleaner.DataCleaner(config)
    self.assertTrue(DataCleaner.logger.warn.called)
    self.assertTrue(cleaner.file_max_age == 3600)

  @patch('os.walk')
  @patch('time.time')
  @patch('os.path.getmtime')
  @patch('os.remove')
  def test_cleanup_success(self,remMock,mtimeMock,timeMock,walkMock):
    """Only stale files matching the cleanup patterns are removed;
    site-13.pp (recent mtime) and 'version' survive."""
    self.config.reset_mock()
    DataCleaner.logger.reset_mock()

    walkMock.return_value = iter(self.test_dir)
    timeMock.return_value = 2592000 + 2
    mtimeMock.side_effect = [1,1,1,2,1,1]

    cleaner = DataCleaner.DataCleaner(self.config)
    cleaner.cleanup()

    self.assertTrue(len(remMock.call_args_list) == 4)
    # FIX: stray trailing semicolons removed.
    remMock.assert_any_call('/test_path/errors-12.txt')
    remMock.assert_any_call('/test_path/output-12.txt')
    remMock.assert_any_call('/test_path/site-12.pp')
    remMock.assert_any_call('/test_path/site-15.pp')

  @patch('os.walk')
  @patch('time.time')
  @patch('os.path.getmtime')
  @patch('os.remove')
  def test_cleanup_remove_error(self,remMock,mtimeMock,timeMock,walkMock):
    """A failure to delete one file is logged once and does not abort the
    rest of the cleanup pass."""
    self.config.reset_mock()
    DataCleaner.logger.reset_mock()

    walkMock.return_value = iter(self.test_dir)
    timeMock.return_value = 2592000 + 2
    mtimeMock.side_effect = [1,1,1,2,1,1]

    def side_effect(arg):
      if arg == '/test_path/site-15.pp':
        raise Exception("Can't remove file")

    remMock.side_effect = side_effect

    cleaner = DataCleaner.DataCleaner(self.config)
    cleaner.cleanup()

    self.assertTrue(len(remMock.call_args_list) == 4)
    self.assertTrue(DataCleaner.logger.error.call_count == 1)

if __name__ == "__main__":
  suite = unittest.TestLoader().loadTestsFromTestCase(TestDataCleaner)
  unittest.TextTestRunner(verbosity=2).run(suite)

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
new file mode 100644
index 0000000..8426012
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import ConfigParser
+import os
+
+import pprint
+
+from unittest import TestCase
+import threading
+import tempfile
+import time
+from threading import Thread
+
+from PythonExecutor import PythonExecutor
+from CustomServiceOrchestrator import CustomServiceOrchestrator
+from FileCache import FileCache
+from AmbariConfig import AmbariConfig
+from mock.mock import MagicMock, patch
+import StringIO
+import sys
+from ambari_agent import AgentException
+
+
+class TestFileCache(TestCase):
+  """Tests FileCache path resolution against a synthetic agent config."""
+
+  def setUp(self):
+    # disable stdout
+    out = StringIO.StringIO()
+    sys.stdout = out
+    # generate sample config
+    tmpdir = tempfile.gettempdir()
+    self.config = ConfigParser.RawConfigParser()
+    self.config.add_section('agent')
+    self.config.set('agent', 'prefix', tmpdir)
+    self.config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+
+
+  @patch("os.path.isdir")
+  def test_get_service_base_dir(self, isdir_mock):
+    # isdir is mocked so the cache tree does not have to exist on disk.
+    fileCache = FileCache(self.config)
+    isdir_mock.return_value = True
+    base = fileCache.get_service_base_dir("HDP", "2.0.7",
+                                          "HBASE", "REGION_SERVER")
+    # NOTE(review): the component name ("REGION_SERVER") does not appear in
+    # the expected path -- the base dir is apparently per-service only.
+    self.assertEqual(base, "/var/lib/ambari-agent/cache/stacks/HDP/2.0.7/services/HBASE")
+
+
+  def tearDown(self):
+    # enable stdout
+    sys.stdout = sys.__stdout__
+
+

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestGrep.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestGrep.py b/ambari-agent/src/test/python/ambari_agent/TestGrep.py
new file mode 100644
index 0000000..829236a
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestGrep.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from ambari_agent.Grep import Grep
+import socket
+import os, sys
+import logging
+
+class TestGrep(TestCase):
+  """Tests for Grep helpers (grep/tail/filterMarkup/cleanByTemplate)
+  driven by canned puppet-output fixture files."""
+
+  logger = logging.getLogger()
+  # Fixture contents; (re)loaded for every test in setUp().
+  string_good = None
+  string_bad = None
+  grep = Grep()
+
+  def setUp(self):
+    # Load fixtures and normalize "\n" to the platform line separator,
+    # matching what Grep is expected to operate on.
+    self.string_good = open('ambari_agent' + os.sep + 'dummy_puppet_output_good.txt', 'r').read().replace("\n", os.linesep)
+    self.string_bad = open('ambari_agent' + os.sep + 'dummy_puppet_output_error.txt', 'r').read().replace("\n", os.linesep)
+    pass
+
+  def test_grep_many_lines(self):
+    # Context window (1000 lines each side) far exceeds the fixture size.
+    fragment = self.grep.grep(self.string_bad, "err", 1000, 1000)
+    desired = self.string_bad.strip()
+    self.assertEquals(fragment, desired, "Grep grep function should return all lines if there are less lines than n")
+
+
+  def test_grep_few_lines(self):
+    # 3 lines of context before and after the matching "Err" line.
+    fragment = self.grep.grep(self.string_bad, "Err", 3, 3)
+    desired = """
+debug: /Schedule[never]: Skipping device resources because running on a host
+debug: Exec[command_good](provider=posix): Executing 'wget e432423423xample.com/badurl444111'
+debug: Executing 'wget e432423423xample.com/badurl444111'
+err: /Stage[main]//Exec[command_good]/returns: change from notrun to 0 failed: wget e432423423xample.com/badurl444111 returned 4 instead of one of [0] at /root/puppet-learn/2-bad.pp:5
+debug: /Schedule[weekly]: Skipping device resources because running on a host
+debug: /Schedule[puppet]: Skipping device resources because running on a host
+debug: Finishing transaction 70171639726240
+""".strip()
+    self.assertEquals(fragment, desired, "Grep grep function should return only last 3 lines of file")
+
+  def test_grep_no_result(self):
+    # The "good" fixture contains no "Err" marker at all.
+    fragment = self.grep.grep(self.string_good, "Err", 3, 3)
+    desired = None
+    self.assertEquals(fragment, desired, 'Grep grep function should return None if result is not found')
+
+  def test_grep_empty_string(self):
+    fragment = self.grep.grep("", "Err", 1000, 1000)
+    desired = None
+    self.assertEquals(fragment, desired, 'Grep grep function should return None for empty string')
+
+  def test_grep_all(self):
+    # Window sized exactly to the fixture boundaries (35 before, 9 after).
+    fragment = self.grep.grep(self.string_bad, "Err", 35, 9)
+    desired = self.string_bad.strip()
+    self.assertEquals(fragment, desired, 'Grep grep function contains bug in index arithmetics')
+
+
+  def test_tail_many_lines(self):
+    fragment = self.grep.tail(self.string_good, 1000)
+    desired = self.string_good.strip()
+    self.assertEquals(fragment, desired, "Grep tail function should return all lines if there are less lines than n")
+
+  def test_tail_few_lines(self):
+    fragment = self.grep.tail(self.string_good, 3)
+    desired = """
+debug: Finishing transaction 70060456663980
+debug: Received report to process from ambari-dmi
+debug: Processing report from ambari-dmi with processor Puppet::Reports::Store
+""".strip()
+    self.assertEquals(fragment, desired, "Grep tail function should return only last 3 lines of file")
+
+  def test_tail_no_lines(self):
+    fragment = self.grep.tail("", 3)
+    desired = ''
+    self.assertEquals(fragment, desired, 'Grep tail function should return "" for empty string')
+
+  def test_tail_all(self):
+    fragment = self.grep.tail("", 47)
+    desired = ''
+    self.assertEquals(fragment, desired, 'Grep tail function contains bug in index arithmetics')
+
+  def test_filterMarkup(self):
+    # Input contains no markup escapes, so filtering is expected to be a
+    # no-op here (input equals the desired output).
+    string = """notice: /Stage[main]/Hdp-hadoop/Hdp-hadoop::Package[hadoop]/Hdp::Package[hadoop 64]/Hdp::Package::Process_pkg[hadoop 64]/Package[hadoop-libhdfs]/ensure: created"""
+    desired="""notice: /Stage[main]/Hdp-hadoop/Hdp-hadoop::Package[hadoop]/Hdp::Package[hadoop 64]/Hdp::Package::Process_pkg[hadoop 64]/Package[hadoop-libhdfs]/ensure: created"""
+    filtered = self.grep.filterMarkup(string)
+    #sys.stderr.write(filtered)
+    self.assertEquals(filtered, desired)
+
+  def tearDown(self):
+    pass
+
+  def test_cleanByTemplate(self):
+    # Strips every line containing the "debug" template from the fixture.
+    fragment = self.grep.cleanByTemplate(self.string_bad, "debug")
+    desired = """
+info: Applying configuration version '1352127563'
+err: /Stage[main]//Exec[command_good]/returns: change from notrun to 0 failed: wget e432423423xample.com/badurl444111 returned 4 instead of one of [0] at /root/puppet-learn/2-bad.pp:5
+notice: Finished catalog run in 0.23 seconds
+""".strip()
+    self.assertEquals(fragment, desired, 'Grep cleanByTemplate function should return string without debug lines.')
+
+

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestHardware.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHardware.py b/ambari-agent/src/test/python/ambari_agent/TestHardware.py
new file mode 100644
index 0000000..18b10d6
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestHardware.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import subprocess, os
+import tempfile
+from unittest import TestCase
+from ambari_agent.Hardware import Hardware
+from mock.mock import MagicMock, patch, ANY
+import mock.mock
+from AmbariConfig import AmbariConfig
+
<br>

+class TestHardware(TestCase):
+  """Tests for the agent's Hardware facts collector (mounts + facter)."""
+
+  def test_build(self):
+    # Runs against the real host: only sanity-checks value ranges, not
+    # exact values, for every reported mount.
+    hardware = Hardware(AmbariConfig().getConfig())
+    result = hardware.get()
+    osdisks = hardware.osdisks()
+    for dev_item in result['mounts']:
+      self.assertTrue(dev_item['available'] >= 0)
+      self.assertTrue(dev_item['used'] >= 0)
+      self.assertTrue(dev_item['percent'] != None)
+      self.assertTrue(dev_item['device'] != None)
+      self.assertTrue(dev_item['mountpoint'] != None)
+      self.assertTrue(dev_item['type'] != None)
+      self.assertTrue(dev_item['size'] > 0)
+
+    for os_disk_item in osdisks:
+      self.assertTrue(os_disk_item['available'] >= 0)
+      self.assertTrue(os_disk_item['used'] >= 0)
+      self.assertTrue(os_disk_item['percent'] != None)
+      self.assertTrue(os_disk_item['device'] != None)
+      self.assertTrue(os_disk_item['mountpoint'] != None)
+      self.assertTrue(os_disk_item['type'] != None)
+      self.assertTrue(os_disk_item['size'] > 0)
+
+    # get()'s 'mounts' must mirror osdisks() one-to-one.
+    self.assertTrue(len(result['mounts']) == len(osdisks))
+
+
+  @patch.object(subprocess, "Popen")
+  @patch.object(Hardware, "facterLib")
+  @patch("os.path.exists")
+  def test_facterInfo(self, os_path_exists_mock, hardware_facterLib_mock, subprocess_popen_mock):
+    # facter itself is replaced by a MagicMock whose communicate() returns
+    # canned "key => value" output.
+    config = AmbariConfig().getConfig()
+    tmp_dir = tempfile.gettempdir()
+    config.set("puppet", "facter_home", tmp_dir)
+    hardware = Hardware(config)
+    facter = MagicMock()
+    facter.communicate.return_value = ["memoryfree => 1 GB\n memorysize => 25 MB\n memorytotal => 300 KB\n "
+                                        + "physicalprocessorcount => 25\n is_virtual => true\n", "no errors"]
+    facter.returncode = 0
+    os.environ['RUBYLIB'] = tmp_dir;
+    subprocess_popen_mock.return_value = facter
+    os_path_exists_mock.return_value = True
+    hardware_facterLib_mock.return_value = "bla bla bla"
+    facterInfo = hardware.facterInfo()
+
+    # Memory values appear normalized to kilobytes (1 GB -> 1048576).
+    self.assertEquals(facterInfo['memoryfree'], 1048576L)
+    self.assertEquals(facterInfo['memorysize'], 25600L)
+    self.assertEquals(facterInfo['memorytotal'], 300L)
+    self.assertEquals(facterInfo['physicalprocessorcount'], 25)
+    self.assertTrue(facterInfo['is_virtual'])
+    # The pre-existing RUBYLIB must be extended with facterLib()'s path.
+    self.assertEquals(subprocess_popen_mock.call_args[1]['env']['RUBYLIB'],
+                      tmp_dir + ":" + "bla bla bla")
+
+    # Single-letter unit suffixes (G/M/K) and ssh key filtering.
+    facter.communicate.return_value = ["memoryfree => 1 G\n memorysize => 25 M\n memorytotal => 300 K\n "
+                                         + "someinfo => 12 Byte\n ssh_name_key => Aa06Fdd\n", "no errors"]
+    facterInfo = hardware.facterInfo()
+    # NOTE(review): returncode is set *after* the facterInfo() call above --
+    # probably intended to precede it; verify the test's intent.
+    facter.returncode = 1
+    self.assertEquals(facterInfo['memoryfree'], 1048576L)
+    self.assertEquals(facterInfo['memorysize'], 25600L)
+    self.assertEquals(facterInfo['memorytotal'], 300L)
+    self.assertEquals(facterInfo['someinfo'], '12 Byte')
+    # ssh keys must not leak into the reported facts.
+    self.assertFalse(facterInfo.has_key('ssh_name_key'))
+
+    # Garbled / unexpected unit strings ("M B", "Byte") fall back to 1.
+    facter.communicate.return_value = ["memoryfree => 1024 M B\n memorytotal => 1024 Byte" , "no errors"]
+
+    facterInfo = hardware.facterInfo()
+
+    self.assertEquals(facterInfo['memoryfree'], 1L)
+    self.assertEquals(facterInfo['memorytotal'], 1L)
+
+    # When the facter binary path does not exist, no facts are returned.
+    os_path_exists_mock.return_value = False
+    facterInfo = hardware.facterInfo()
+
+    self.assertEquals(facterInfo, {})
+
+
+  @patch("os.path.exists")
+  def test_facterBin(self, ps_path_exists_mock):
+    # Falls back to the bare "facter" command when the home dir is absent.
+    hardware = Hardware(AmbariConfig().getConfig())
+    ps_path_exists_mock.return_value = False
+    result = hardware.facterBin("bla bla bla")
+    self.assertEquals(result, "facter")
+
+    ps_path_exists_mock.return_value = True
+    result = hardware.facterBin("bla bla bla")
+    self.assertEquals(result, "bla bla bla/bin/facter")
+
+
+  @patch("os.path.exists")
+  @patch.dict('os.environ', {"PATH": ""})
+  @patch.object(subprocess, "Popen")
+  @patch.object(Hardware, "facterInfo")
+  def test_configureEnviron(self, hrdware_facterinfo_mock, subproc_popen, os_path_exists_mock):
+    # ruby_home from config must be prepended to PATH and exported as
+    # MY_RUBY_HOME.
+    config = AmbariConfig().getConfig()
+    tmpdir = tempfile.gettempdir()
+    config.set("puppet", "ruby_home", tmpdir)
+    hardware = Hardware(config)
+    os_path_exists_mock.return_value = True
+    result = hardware.configureEnviron({'PATH': ""})
+
+    self.assertEquals(result['PATH'], tmpdir + "/bin:")
+    self.assertEquals(result['MY_RUBY_HOME'], tmpdir)
+    config.remove_option("puppet", "ruby_home")  # undo shared-config change
+
+
+  def test_facterLib(self):
+    hardware = Hardware(AmbariConfig().getConfig())
+    facterLib = hardware.facterLib("/home")
+    self.assertEquals(facterLib, "/home/lib/")
+
+
+  def test_extractMountInfo(self):
+    # A df-style line with exactly 7 whitespace-separated fields parses
+    # positionally into the mount dict.
+    outputLine = "device type size used available percent mountpoint"
+    result = Hardware.extractMountInfo(outputLine)
+
+    self.assertEquals(result['device'], 'device')
+    self.assertEquals(result['type'], 'type')
+    self.assertEquals(result['size'], 'size')
+    self.assertEquals(result['used'], 'used')
+    self.assertEquals(result['available'], 'available')
+    self.assertEquals(result['percent'], 'percent')
+    self.assertEquals(result['mountpoint'], 'mountpoint')
+
+    # Anything other than exactly 7 fields yields None.
+    outputLine = ""
+    result = Hardware.extractMountInfo(outputLine)
+
+    self.assertEquals(result, None)
+
+    outputLine = "device type size used available percent"
+    result = Hardware.extractMountInfo(outputLine)
+
+    self.assertEquals(result, None)
+
+    outputLine = "device type size used available percent mountpoint info"
+    result = Hardware.extractMountInfo(outputLine)
+
+    self.assertEquals(result, None)
+
+
+
+

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py b/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py
new file mode 100644
index 0000000..5ef5d72
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+import unittest
+from ambari_agent.Heartbeat import Heartbeat
+from ambari_agent.ActionQueue import ActionQueue
+from ambari_agent.LiveStatus import LiveStatus
+from ambari_agent import AmbariConfig
+import socket
+import os
+import time
+from mock.mock import patch, MagicMock, call
+from ambari_agent.StackVersionsFileHandler import StackVersionsFileHandler
+from ambari_agent.HostInfo import HostInfo
+import StringIO
+import sys
+
+class TestHeartbeat(TestCase):
+  """Tests the heartbeat payload the agent builds from its ActionQueue."""
+
+  def setUp(self):
+    # disable stdout
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+
+  def tearDown(self):
+    # enable stdout
+    sys.stdout = sys.__stdout__
+
+
+  def test_build(self):
+    # Heartbeat built from an empty queue: checks the envelope fields only.
+    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    heartbeat = Heartbeat(actionQueue)
+    result = heartbeat.build(100)
+    print "Heartbeat: " + str(result)
+    self.assertEquals(result['hostname'] != '', True, "hostname should not be empty")
+    self.assertEquals(result['responseId'], 100)
+    self.assertEquals(result['componentStatus'] is not None, True, "Heartbeat should contain componentStatus")
+    self.assertEquals(result['reports'] is not None, True, "Heartbeat should contain reports")
+    self.assertEquals(result['timestamp'] >= 1353679373880L, True)
+    self.assertEquals(len(result['nodeStatus']), 2)
+    self.assertEquals(result['nodeStatus']['cause'], "NONE")
+    self.assertEquals(result['nodeStatus']['status'], "HEALTHY")
+    # result may or may NOT have an agentEnv structure in it
+    self.assertEquals((len(result) is 6) or (len(result) is 7), True)
+    self.assertEquals(not heartbeat.reports, True, "Heartbeat should not contain task in progress")
+
+
+  @patch.object(ActionQueue, "result")
+  @patch.object(HostInfo, "register")
+  def test_no_mapping(self, register_mock, result_mock):
+    # Checks the second positional argument passed to HostInfo.register:
+    # True for id=10 with componentsMapped, False for the initial id=0
+    # heartbeat -- presumably whether the full host check may be skipped;
+    # TODO confirm against HostInfo.register's signature.
+    result_mock.return_value = {
+      'reports': [{'status': 'IN_PROGRESS',
+                   'stderr': 'Read from /tmp/errors-3.txt',
+                   'stdout': 'Read from /tmp/output-3.txt',
+                   'clusterName': u'cc',
+                   'roleCommand': u'INSTALL',
+                   'serviceName': u'HDFS',
+                   'role': u'DATANODE',
+                   'actionId': '1-1',
+                   'taskId': 3,
+                   'exitCode': 777}],
+      'componentStatus': [{'status': 'HEALTHY', 'componentName': 'NAMENODE'}]
+    }
+    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    heartbeat = Heartbeat(actionQueue)
+    hb = heartbeat.build(id = 10, state_interval=1, componentsMapped=True)
+    self.assertEqual(register_mock.call_args_list[0][0][1], True)
+    register_mock.reset_mock()
+
+    hb = heartbeat.build(id = 0, state_interval=1, componentsMapped=True)
+    self.assertEqual(register_mock.call_args_list[0][0][1], False)
+
+
+  @patch.object(ActionQueue, "result")
+  def test_build_long_result(self, result_mock):
+    # Four reports in every lifecycle state plus two component statuses
+    # must be passed through into the heartbeat unchanged.
+    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    result_mock.return_value = {
+      'reports': [{'status': 'IN_PROGRESS',
+            'stderr': 'Read from /tmp/errors-3.txt',
+            'stdout': 'Read from /tmp/output-3.txt',
+            'clusterName': u'cc',
+            'roleCommand': u'INSTALL',
+            'serviceName': u'HDFS',
+            'role': u'DATANODE',
+            'actionId': '1-1',
+            'taskId': 3,
+            'exitCode': 777},
+
+            {'status': 'COMPLETED',
+             'stderr': 'stderr',
+             'stdout': 'out',
+             'clusterName': 'clusterName',
+             'roleCommand': 'UPGRADE',
+             'serviceName': 'serviceName',
+             'role': 'role',
+             'actionId': 17,
+             'taskId': 'taskId',
+             'exitCode': 0},
+
+            {'status': 'FAILED',
+             'stderr': 'stderr',
+             'stdout': 'out',
+             'clusterName': u'cc',
+             'roleCommand': u'INSTALL',
+             'serviceName': u'HDFS',
+             'role': u'DATANODE',
+             'actionId': '1-1',
+             'taskId': 3,
+             'exitCode': 13},
+
+            {'status': 'COMPLETED',
+             'stderr': 'stderr',
+             'stdout': 'out',
+             'clusterName': u'cc',
+             'configurationTags': {'global': {'tag': 'v1'}},
+             'roleCommand': u'INSTALL',
+             'serviceName': u'HDFS',
+             'role': u'DATANODE',
+             'actionId': '1-1',
+             'taskId': 3,
+             'exitCode': 0}
+
+            ],
+      'componentStatus': [
+        {'status': 'HEALTHY', 'componentName': 'DATANODE'},
+        {'status': 'UNHEALTHY', 'componentName': 'NAMENODE'},
+      ],
+    }
+    heartbeat = Heartbeat(actionQueue)
+    hb = heartbeat.build(10)
+    # Pin host- and time-dependent fields so the dict compare is stable.
+    hb['hostname'] = 'hostname'
+    hb['timestamp'] = 'timestamp'
+    expected = {'nodeStatus':
+                  {'status': 'HEALTHY',
+                   'cause': 'NONE'},
+                'timestamp': 'timestamp', 'hostname': 'hostname',
+                'responseId': 10, 'reports': [
+      {'status': 'IN_PROGRESS', 'roleCommand': u'INSTALL',
+       'serviceName': u'HDFS', 'role': u'DATANODE', 'actionId': '1-1',
+       'stderr': 'Read from /tmp/errors-3.txt',
+       'stdout': 'Read from /tmp/output-3.txt', 'clusterName': u'cc',
+       'taskId': 3, 'exitCode': 777},
+      {'status': 'COMPLETED', 'roleCommand': 'UPGRADE',
+       'serviceName': 'serviceName', 'role': 'role', 'actionId': 17,
+       'stderr': 'stderr', 'stdout': 'out', 'clusterName': 'clusterName',
+       'taskId': 'taskId', 'exitCode': 0},
+      {'status': 'FAILED', 'roleCommand': u'INSTALL', 'serviceName': u'HDFS',
+       'role': u'DATANODE', 'actionId': '1-1', 'stderr': 'stderr',
+       'stdout': 'out', 'clusterName': u'cc', 'taskId': 3, 'exitCode': 13},
+      {'status': 'COMPLETED', 'stdout': 'out',
+       'configurationTags': {'global': {'tag': 'v1'}}, 'taskId': 3,
+       'exitCode': 0, 'roleCommand': u'INSTALL', 'clusterName': u'cc',
+       'serviceName': u'HDFS', 'role': u'DATANODE', 'actionId': '1-1',
+       'stderr': 'stderr'}], 'componentStatus': [
+      {'status': 'HEALTHY', 'componentName': 'DATANODE'},
+      {'status': 'UNHEALTHY', 'componentName': 'NAMENODE'}]}
+    self.assertEquals(hb, expected)
+
+
+  @patch.object(HostInfo, 'register')
+  def test_heartbeat_no_host_check_cmd_in_queue(self, register_mock):
+    # A queued STATUS_COMMAND must flip register()'s third positional
+    # argument to True while the second stays False.
+    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    statusCommand = {
+      "serviceName" : 'HDFS',
+      "commandType" : "STATUS_COMMAND",
+      "clusterName" : "c1",
+      "componentName" : "DATANODE",
+      'configurations':{'global' : {}}
+    }
+    actionQueue.put([statusCommand])
+
+    heartbeat = Heartbeat(actionQueue)
+    heartbeat.build(12, 6)
+    self.assertTrue(register_mock.called)
+    args, kwargs = register_mock.call_args_list[0]
+    self.assertTrue(args[2])
+    self.assertFalse(args[1])
+
+
+  @patch.object(HostInfo, 'register')
+  def test_heartbeat_host_check_no_cmd(self, register_mock):
+    # Empty queue: both register() flags stay False.
+    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    heartbeat = Heartbeat(actionQueue)
+    heartbeat.build(12, 6)
+    self.assertTrue(register_mock.called)
+    args, kwargs = register_mock.call_args_list[0]
+    self.assertFalse(args[1])
+    self.assertFalse(args[2])
+
+
+if __name__ == "__main__":
+  unittest.main(verbosity=2)  # allow running this test module standalone

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestHostCheckReportFileHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostCheckReportFileHandler.py b/ambari-agent/src/test/python/ambari_agent/TestHostCheckReportFileHandler.py
new file mode 100644
index 0000000..211d74b
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostCheckReportFileHandler.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+import unittest
+import os
+import tempfile
+from ambari_agent.HostCheckReportFileHandler import HostCheckReportFileHandler
+import logging
+import ConfigParser
+
+class TestHostCheckReportFileHandler(TestCase):
+  """Tests that host-check results are serialized into the INI-style
+  host check report file with the expected sections and keys."""
+
+  logger = logging.getLogger()
+
+  def test_write_host_check_report_really_empty(self):
+    # NOTE(review): tempfile.mktemp is race-prone/deprecated; only the
+    # dirname is used here, so it is harmless in practice.
+    tmpfile = tempfile.mktemp()
+
+    config = ConfigParser.RawConfigParser()
+    config.add_section('agent')
+    config.set('agent', 'prefix', os.path.dirname(tmpfile))
+
+    handler = HostCheckReportFileHandler(config)
+    # NOTE(review): local name shadows the builtin `dict`.
+    dict = {}
+    handler.writeHostCheckFile(dict)
+
+    # A completely empty input dict may produce no sections at all, hence
+    # the has_section guard before reading back.
+    configValidator = ConfigParser.RawConfigParser()
+    configPath = os.path.join(os.path.dirname(tmpfile), HostCheckReportFileHandler.HOST_CHECK_FILE)
+    configValidator.read(configPath)
+    if configValidator.has_section('users'):
+      users = configValidator.get('users', 'usr_list')
+      self.assertEquals(users, '')
+
+  def test_write_host_check_report_empty(self):
+    # All expected keys present but with empty lists: every section must
+    # still be written with empty values.
+    tmpfile = tempfile.mktemp()
+
+    config = ConfigParser.RawConfigParser()
+    config.add_section('agent')
+    config.set('agent', 'prefix', os.path.dirname(tmpfile))
+
+    handler = HostCheckReportFileHandler(config)
+    dict = {}
+    dict['hostHealth'] = {}
+    dict['existingUsers'] = []
+    dict['alternatives'] = []
+    dict['stackFoldersAndFiles'] = []
+    dict['hostHealth']['activeJavaProcs'] = []
+    dict['installedPackages'] = []
+    dict['existingRepos'] = []
+
+    handler.writeHostCheckFile(dict)
+
+    configValidator = ConfigParser.RawConfigParser()
+    configPath = os.path.join(os.path.dirname(tmpfile), HostCheckReportFileHandler.HOST_CHECK_FILE)
+    configValidator.read(configPath)
+    users = configValidator.get('users', 'usr_list')
+    users = configValidator.get('users', 'usr_homedir_list')
+    self.assertEquals(users, '')
+    names = configValidator.get('alternatives', 'symlink_list')
+    targets = configValidator.get('alternatives', 'target_list')
+    self.assertEquals(names, '')
+    self.assertEquals(targets, '')
+
+    paths = configValidator.get('directories', 'dir_list')
+    self.assertEquals(paths, '')
+
+    procs = configValidator.get('processes', 'proc_list')
+    self.assertEquals(procs, '')
+
+    pkgs = configValidator.get('packages', 'pkg_list')
+    self.assertEquals(pkgs, '')
+
+    repos = configValidator.get('repositories', 'repo_list')
+    self.assertEquals(repos, '')
+
+    # The metadata section must always carry a creation timestamp.
+    time = configValidator.get('metadata', 'created')
+    self.assertTrue(time != None)
+
+  def test_write_host_check_report(self):
+    # Fully populated input: every list must round-trip through the file
+    # as a comma-delimited value in its section.
+    tmpfile = tempfile.mktemp()
+
+    config = ConfigParser.RawConfigParser()
+    config.add_section('agent')
+    config.set('agent', 'prefix', os.path.dirname(tmpfile))
+
+    handler = HostCheckReportFileHandler(config)
+
+    dict = {}
+    dict['hostHealth'] = {}
+    dict['existingUsers'] = [{'name':'user1', 'homeDir':'/var/log', 'status':'Exists'}]
+    dict['alternatives'] = [
+      {'name':'/etc/alternatives/hadoop-conf', 'target':'/etc/hadoop/conf.dist'},
+      {'name':'/etc/alternatives/hbase-conf', 'target':'/etc/hbase/conf.1'}
+    ]
+    dict['stackFoldersAndFiles'] = [{'name':'/a/b', 'type':'directory'},{'name':'/a/b.txt', 'type':'file'}]
+    dict['hostHealth']['activeJavaProcs'] = [
+      {'pid':355,'hadoop':True,'command':'some command','user':'root'},
+      {'pid':455,'hadoop':True,'command':'some command','user':'hdfs'}
+    ]
+    dict['installedPackages'] = [
+      {'name':'hadoop','version':'3.2.3','repoName':'HDP'},
+      {'name':'hadoop-lib','version':'3.2.3','repoName':'HDP'}
+    ]
+    dict['existingRepos'] = ['HDP', 'HDP-epel']
+    handler.writeHostCheckFile(dict)
+
+    configValidator = ConfigParser.RawConfigParser()
+    configPath = os.path.join(os.path.dirname(tmpfile), HostCheckReportFileHandler.HOST_CHECK_FILE)
+    configValidator.read(configPath)
+    users = configValidator.get('users', 'usr_list')
+    homedirs = configValidator.get('users', 'usr_homedir_list')
+    self.assertEquals(users, 'user1')
+    self.assertEquals(homedirs, '/var/log')
+
+    names = configValidator.get('alternatives', 'symlink_list')
+    targets = configValidator.get('alternatives', 'target_list')
+    self.chkItemsEqual(names, ['/etc/alternatives/hadoop-conf', '/etc/alternatives/hbase-conf'])
+    self.chkItemsEqual(targets, ['/etc/hadoop/conf.dist','/etc/hbase/conf.1'])
+
+    paths = configValidator.get('directories', 'dir_list')
+    self.chkItemsEqual(paths, ['/a/b','/a/b.txt'])
+
+    procs = configValidator.get('processes', 'proc_list')
+    self.chkItemsEqual(procs, ['455', '355'])
+
+    pkgs = configValidator.get('packages', 'pkg_list')
+    self.chkItemsEqual(pkgs, ['hadoop', 'hadoop-lib'])
+
+    repos = configValidator.get('repositories', 'repo_list')
+    self.chkItemsEqual(repos, ['HDP', 'HDP-epel'])
+
+    time = configValidator.get('metadata', 'created')
+    self.assertTrue(time != None)
+
+  def chkItemsEqual(self, commaDelimited, items):
+    # Order-insensitive comparison of a comma-delimited string against a
+    # list of expected items (mutates `items` by sorting it in place).
+    items1 = commaDelimited.split(',')
+    items1.sort()
+    items.sort()
+    items1Str = ','.join(items1)
+    items2Str = ','.join(items)
+    self.assertEquals(items1Str, items2Str)
+
+if __name__ == "__main__":
+  unittest.main(verbosity=2)  # allow running this test module standalone

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
new file mode 100644
index 0000000..3534195
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
@@ -0,0 +1,429 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+import unittest
+from mock.mock import patch, MagicMock, call, create_autospec
+from ambari_agent import HostCleanup
+import StringIO
+import sys
+import tempfile
+import os.path
+import optparse
+import logging
+
# Section and key names mirroring the layout of the hostcheck report file
# parsed by HostCleanup.read_host_check_file().
PACKAGE_SECTION = "packages"
PACKAGE_KEY = "pkg_list"
USER_SECTION = "users"
USER_KEY = "usr_list"
REPO_SECTION = "repositories"
REPOS_KEY = "pkg_list"  # NOTE(review): same value as PACKAGE_KEY — likely meant "repo_list"; confirm against HostCleanup
DIR_SECTION = "directories"
DIR_KEY = "dir_list"
PROCESS_SECTION = "processes"
PROCESS_KEY = "proc_list"
ALT_SECTION = "alternatives"
ALT_KEYS = ["symlink_list", "target_list"]
# Command template used by do_erase_alternatives: {0}=symlink name, {1}=target path.
ALT_ERASE_CMD = "alternatives --remove {0} {1}"
USER_HOMEDIR_SECTION = "usr_homedir"
+
+class TestHostCleanup(TestCase):
+
+  def setUp(self):
+    HostCleanup.logger = MagicMock()
+    self.hostcleanup = HostCleanup.HostCleanup()
+    # disable stdout
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+
  def tearDown(self):
    # Reattach the interpreter's original stdout that setUp redirected.
    sys.stdout = sys.__stdout__
+
+  def test_read_host_check_file_with_content(self):
+    out = StringIO.StringIO()
+    sys.stdout = out
+    tmpfile = tempfile.mktemp()
+    f = open(tmpfile,'w')
+    fileContent = """[processes]
+proc_list = 323,434
+
+[users]
+usr_list = rrdcached,ambari-qa,hive,oozie,hbase,hcat,mysql,mapred,hdfs,zookeeper,sqoop,nagios
+
+[repositories]
+repo_list = HDP-1.3.0,HDP-epel
+
+[directories]
+dir_list = /etc/hadoop,/etc/hbase,/etc/hcatalog,/tmp/hive,/tmp/nagios,/var/nagios
+
+[alternatives]
+symlink_list = hcatalog-conf,hadoop-default,hadoop-log,oozie-conf
+target_list = /etc/hcatalog/conf.dist,/usr/share/man/man1/hadoop.1.gz,/etc/oozie/conf.dist,/usr/lib/hadoop
+
+[packages]
+pkg_list = sqoop.noarch,hadoop-libhdfs.x86_64,rrdtool.x86_64,ganglia-gmond.x86_64
+
+[metadata]
+created = 2013-07-02 20:39:22.162757"""
+    f.write(fileContent)
+    f.close()
+
+    propMap = self.hostcleanup.read_host_check_file(tmpfile)
+    self.assertTrue("323" in propMap["processes"])
+    self.assertTrue("mysql" in propMap["users"])
+    self.assertTrue("HDP-epel" in propMap["repositories"])
+    self.assertTrue("/etc/hadoop" in propMap["directories"])
+    self.assertTrue("hcatalog-conf" in propMap["alternatives"]["symlink_list"])
+    self.assertTrue("/etc/oozie/conf.dist" in propMap["alternatives"]["target_list"])
+    self.assertTrue("hadoop-libhdfs.x86_64" in propMap["packages"])
+    sys.stdout = sys.__stdout__
+
  class HostCleanupOptions:
    """Stand-in for the optparse options object returned by parse_args().

    Carries only the attributes that HostCleanup.main() reads.
    """
    def __init__(self, outputfile, inputfile, skip, verbose, silent):
      self.outputfile = outputfile  # --out: log/report file path
      self.inputfile = inputfile    # --in: hostcheck report to read
      self.skip = skip              # --skip: comma-separated sections to skip
      self.verbose = verbose        # --verbose: enable debug logging
      self.silent = silent          # --silent: skip the confirmation prompt
+
  @patch.object(HostCleanup, 'get_YN_input')
  @patch.object(HostCleanup.HostCleanup, 'do_cleanup')
  @patch.object(HostCleanup.HostCleanup, 'is_current_user_root')
  @patch.object(logging.FileHandler, 'setFormatter')
  @patch.object(HostCleanup.HostCleanup,'read_host_check_file')
  @patch.object(logging,'basicConfig')
  @patch.object(logging, 'FileHandler')
  @patch.object(optparse.OptionParser, 'parse_args')
  def test_options(self, parser_mock, file_handler_mock, logging_mock, read_host_check_file_mock,
                   set_formatter_mock, user_root_mock, do_cleanup_mock, get_yn_input_mock):
    """main() must honour --out/--skip/--verbose/--in and, because silent is
    False here, prompt the user for confirmation (get_YN_input called).

    NOTE: mock decorators apply bottom-up, so the parameter order above is
    the reverse of the decorator order — keep them in sync.
    """
    parser_mock.return_value = (TestHostCleanup.HostCleanupOptions('/someoutputfile', '/someinputfile', '', False,
                                                                   False), [])
    file_handler_mock.return_value = logging.FileHandler('') # disable creating real file
    user_root_mock.return_value = True
    get_yn_input_mock.return_value = True
    HostCleanup.main()

    # test --out
    file_handler_mock.assert_called_with('/someoutputfile')
    # test --skip
    self.assertEquals([''],HostCleanup.SKIP_LIST)
    #test --verbose
    logging_mock.assert_called_with(level=logging.INFO)
    # test --in
    read_host_check_file_mock.assert_called_with('/someinputfile')
    self.assertTrue(get_yn_input_mock.called)
+
+
  @patch.object(HostCleanup, 'get_YN_input')
  @patch.object(HostCleanup.HostCleanup, 'do_cleanup')
  @patch.object(HostCleanup.HostCleanup, 'is_current_user_root')
  @patch.object(logging.FileHandler, 'setFormatter')
  @patch.object(HostCleanup.HostCleanup,'read_host_check_file')
  @patch.object(logging,'basicConfig')
  @patch.object(logging, 'FileHandler')
  @patch.object(optparse.OptionParser, 'parse_args')
  def test_options_silent(self, parser_mock, file_handler_mock, logging_mock, read_host_check_file_mock,
                   set_formatter_mock, user_root_mock, do_cleanup_mock, get_yn_input_mock):
    """Same flow as test_options but with --silent: main() must NOT prompt
    the user (get_YN_input never called) while still honouring the options.

    NOTE: mock decorators apply bottom-up — parameter order is the reverse
    of the decorator order.
    """
    parser_mock.return_value = (TestHostCleanup.HostCleanupOptions('/someoutputfile', '/someinputfile', '', False,
                                                                   True), [])
    file_handler_mock.return_value = logging.FileHandler('') # disable creating real file
    user_root_mock.return_value = True
    get_yn_input_mock.return_value = True
    HostCleanup.main()

    # test --out
    file_handler_mock.assert_called_with('/someoutputfile')
    # test --skip
    self.assertEquals([''],HostCleanup.SKIP_LIST)
    #test --verbose
    logging_mock.assert_called_with(level=logging.INFO)
    # test --in
    read_host_check_file_mock.assert_called_with('/someinputfile')
    self.assertFalse(get_yn_input_mock.called)
+
  @patch.object(HostCleanup.HostCleanup, 'do_erase_alternatives')
  @patch.object(HostCleanup.HostCleanup, 'find_repo_files_for_repos')
  @patch.object(HostCleanup.HostCleanup, 'get_os_type')
  @patch.object(HostCleanup.HostCleanup, 'do_kill_processes')
  @patch.object(HostCleanup.HostCleanup, 'do_erase_files_silent')
  @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
  @patch.object(HostCleanup.HostCleanup, 'do_delete_users')
  @patch.object(HostCleanup.HostCleanup, 'do_erase_packages')
  def test_do_cleanup_all(self, do_erase_packages_method, do_delete_users_method,
                      do_erase_dir_silent_method,
                      do_erase_files_silent_method, do_kill_processes_method,
                      get_os_type_method, find_repo_files_for_repos_method,
                      do_erase_alternatives_method):
    """When every section (incl. alternatives and user homedirs) is present,
    do_cleanup() must dispatch each section's data to its cleanup helper.

    NOTE: mock decorators apply bottom-up — parameter order is the reverse
    of the decorator order.
    """
    out = StringIO.StringIO()
    sys.stdout = out
    propertyMap = {PACKAGE_SECTION:['abcd', 'pqrst'], USER_SECTION:['abcd', 'pqrst'],
                   REPO_SECTION:['abcd', 'pqrst'], DIR_SECTION:['abcd', 'pqrst'],
                   PROCESS_SECTION:['abcd', 'pqrst'],
                   ALT_SECTION:{ALT_KEYS[0]:['alt1','alt2'], ALT_KEYS[1]:[
                     'dir1']}, USER_HOMEDIR_SECTION:['decf']}
    get_os_type_method.return_value = 'redhat'
    find_repo_files_for_repos_method.return_value = ['abcd', 'pqrst']

    self.hostcleanup.do_cleanup(propertyMap)

    self.assertTrue(do_delete_users_method.called)
    self.assertTrue(do_erase_dir_silent_method.called)
    self.assertTrue(do_erase_files_silent_method.called)
    self.assertTrue(do_erase_packages_method.called)
    self.assertTrue(do_kill_processes_method.called)
    self.assertTrue(do_erase_alternatives_method.called)
    # dir erase is called twice: once for user homedirs, once for dir_list
    calls = [call(['decf']), call(['abcd', 'pqrst'])]
    do_erase_dir_silent_method.assert_has_calls(calls)
    do_erase_packages_method.assert_called_once_with(['abcd', 'pqrst'])
    do_erase_files_silent_method.assert_called_once_with(['abcd', 'pqrst'])
    do_delete_users_method.assert_called_once_with(['abcd', 'pqrst'])
    do_kill_processes_method.assert_called_once_with(['abcd', 'pqrst'])
    do_erase_alternatives_method.assert_called_once_with({ALT_KEYS[0]:['alt1',
                                              'alt2'], ALT_KEYS[1]:['dir1']})

    sys.stdout = sys.__stdout__
+
+
+  @patch.object(HostCleanup.HostCleanup, 'do_delete_by_owner')
+  @patch.object(HostCleanup.HostCleanup, 'get_user_ids')
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_alternatives')
+  @patch.object(HostCleanup.HostCleanup, 'find_repo_files_for_repos')
+  @patch.object(HostCleanup.HostCleanup, 'get_os_type')
+  @patch.object(HostCleanup.HostCleanup, 'do_kill_processes')
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_files_silent')
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
+  @patch.object(HostCleanup.HostCleanup, 'do_delete_users')
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_packages')
+  def test_do_cleanup_default(self, do_erase_packages_method, do_delete_users_method,
+                      do_erase_dir_silent_method,
+                      do_erase_files_silent_method, do_kill_processes_method,
+                      get_os_type_method, find_repo_files_for_repos_method,
+                      do_erase_alternatives_method, get_user_ids_method,
+                      do_delete_by_owner_method):
+
+    global SKIP_LIST
+    oldSkipList = HostCleanup.SKIP_LIST
+    HostCleanup.SKIP_LIST = ["users"]
+    out = StringIO.StringIO()
+    sys.stdout = out
+    propertyMap = {PACKAGE_SECTION:['abcd', 'pqrst'], USER_SECTION:['abcd', 'pqrst'],
+                   REPO_SECTION:['abcd', 'pqrst'], DIR_SECTION:['abcd', 'pqrst'],
+                   PROCESS_SECTION:['abcd', 'pqrst'],
+                   ALT_SECTION:{ALT_KEYS[0]:['alt1','alt2'], ALT_KEYS[1]:[
+                     'dir1']}}
+    get_os_type_method.return_value = 'redhat'
+    find_repo_files_for_repos_method.return_value = ['abcd', 'pqrst']
+
+    self.hostcleanup.do_cleanup(propertyMap)
+
+    self.assertFalse(do_delete_by_owner_method.called)
+    self.assertFalse(get_user_ids_method.called)
+    self.assertFalse(do_delete_users_method.called)
+    self.assertTrue(do_erase_dir_silent_method.called)
+    self.assertTrue(do_erase_files_silent_method.called)
+    self.assertTrue(do_erase_packages_method.called)
+    self.assertTrue(do_kill_processes_method.called)
+    self.assertTrue(do_erase_alternatives_method.called)
+    HostCleanup.SKIP_LIST = oldSkipList
+    sys.stdout = sys.__stdout__
+
+  @patch.object(HostCleanup.HostCleanup, 'find_repo_files_for_repos')
+  @patch.object(HostCleanup.HostCleanup, 'get_os_type')
+  @patch.object(HostCleanup.HostCleanup, 'do_kill_processes')
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_files_silent')
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
+  @patch.object(HostCleanup.HostCleanup, 'do_delete_users')
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_packages')
+  def test_do_cleanup_with_skip(self, do_erase_packages_method,
+                      do_delete_users_method,
+                      do_erase_dir_silent_method,
+                      do_erase_files_silent_method, do_kill_processes_method,
+                      get_os_type_method, find_repo_files_for_repos_method):
+
+    out = StringIO.StringIO()
+    sys.stdout = out
+    propertyMap = {PACKAGE_SECTION:['abcd', 'pqrst'], USER_SECTION:['abcd', 'pqrst'],
+                   REPO_SECTION:['abcd', 'pqrst'], DIR_SECTION:['abcd', 'pqrst'],
+                   PROCESS_SECTION:['abcd', 'pqrst']}
+    get_os_type_method.return_value = 'redhat'
+    find_repo_files_for_repos_method.return_value = ['abcd', 'pqrst']
+    HostCleanup.SKIP_LIST = [PACKAGE_SECTION, REPO_SECTION]
+
+    self.hostcleanup.do_cleanup(propertyMap)
+
+    self.assertTrue(do_delete_users_method.called)
+    self.assertTrue(do_erase_dir_silent_method.called)
+    self.assertFalse(do_erase_files_silent_method.called)
+    self.assertFalse(do_erase_packages_method.called)
+    self.assertTrue(do_kill_processes_method.called)
+    calls = [call(None), call(['abcd', 'pqrst'])]
+    do_erase_dir_silent_method.assert_has_calls(calls)
+    do_delete_users_method.assert_called_once_with(['abcd', 'pqrst'])
+    do_kill_processes_method.assert_called_once_with(['abcd', 'pqrst'])
+
+    sys.stdout = sys.__stdout__
+
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
+  @patch("os.stat")
+  @patch("os.path.join")
+  @patch("os.listdir")
+  def test_do_delete_by_owner(self, listdir_mock, join_mock, stat_mock, do_erase_dir_silent_method):
+    listdir_mock.return_value = ["k", "j"]
+    join_mock.return_value = "path"
+    response = MagicMock()
+    response.st_uid = 1
+    stat_mock.return_value = response
+    self.hostcleanup.do_delete_by_owner([1, 2], ["a"])
+    self.assertTrue(do_erase_dir_silent_method.called)
+    calls = [call(["path"]), call(["path"])]
+    do_erase_dir_silent_method.assert_has_calls(calls)
+
+  @patch.object(HostCleanup.HostCleanup, 'run_os_command')
+  def test_do_delete_users(self, run_os_command_mock):
+    run_os_command_mock.return_value = (1, "", "")
+    self.hostcleanup.do_delete_users(["a", "b"])
+    self.assertTrue(run_os_command_mock.called)
+    calls = [call('userdel -rf a'), call('userdel -rf b'), call('groupdel hadoop')]
+    run_os_command_mock.assert_has_calls(calls)
+
+  @patch("ConfigParser.RawConfigParser")
+  @patch("__builtin__.open")
+  def test_read_host_check_file(self, openMock, readMock):
+    out = StringIO.StringIO()
+    sys.stdout = out
+    f = MagicMock()
+    openMock.return_value = f
+
+    propertyMap = self.hostcleanup.read_host_check_file('test')
+
+    self.assertTrue(openMock.called)
+    self.assertTrue(readMock.called)
+    self.assertTrue(propertyMap.has_key(PACKAGE_SECTION))
+    self.assertTrue(propertyMap.has_key(REPO_SECTION))
+    self.assertTrue(propertyMap.has_key(USER_SECTION))
+    self.assertTrue(propertyMap.has_key(DIR_SECTION))
+    self.assertTrue(propertyMap.has_key(PROCESS_SECTION))
+
+    sys.stdout = sys.__stdout__
+
+
+  @patch.object(HostCleanup.HostCleanup, 'run_os_command')
+  @patch.object(HostCleanup.HostCleanup, 'get_os_type')
+  def test_do_earse_packages(self, get_os_type_method, run_os_command_method):
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+    get_os_type_method.return_value = 'redhat'
+    run_os_command_method.return_value = (0, 'success', 'success')
+
+    retval = self.hostcleanup.do_erase_packages(['abcd', 'wxyz'])
+
+    self.assertTrue(get_os_type_method.called)
+    self.assertTrue(run_os_command_method.called)
+    run_os_command_method.assert_called_with("yum erase -y {0}".format(' '
+    .join(['abcd', 'wxyz'])))
+    self.assertEquals(0, retval)
+
+    get_os_type_method.reset()
+    run_os_command_method.reset()
+
+    get_os_type_method.return_value = 'suse'
+    run_os_command_method.return_value = (0, 'success', 'success')
+
+    retval = self.hostcleanup.do_erase_packages(['abcd', 'wxyz'])
+
+    self.assertTrue(get_os_type_method.called)
+    self.assertTrue(run_os_command_method.called)
+    run_os_command_method.assert_called_with("zypper -n -q remove {0}"
+    .format(' '.join(['abcd', 'wxyz'])))
+    self.assertEquals(0, retval)
+
+    sys.stdout = sys.__stdout__
+
+  @patch.object(HostCleanup.HostCleanup, 'get_files_in_dir')
+  @patch.object(HostCleanup.HostCleanup, 'get_os_type')
+  def test_find_repo_files_for_repos(self, get_os_type_method,
+                                    get_files_in_dir_method):
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+    tmpfile = tempfile.mktemp()
+    fileContent = """[###]
+[aass]
+[$$]
+444]saas[333
+1122[naas]2222
+name=sd des derft 3.1
+"""
+    with open(tmpfile,'w') as file:
+      file.write(fileContent)
+    get_os_type_method.return_value = 'redhat'
+    get_files_in_dir_method.return_value = [ tmpfile ]
+
+    repoFiles = self.hostcleanup.find_repo_files_for_repos(['aass'])
+    self.assertTrue(get_files_in_dir_method.called)
+    self.assertTrue(get_os_type_method.called)
+    self.assertEquals(repoFiles, [ tmpfile ])
+
+    repoFiles = self.hostcleanup.find_repo_files_for_repos(['sd des derft 3.1'])
+    self.assertTrue(get_files_in_dir_method.called)
+    self.assertTrue(get_os_type_method.called)
+    self.assertEquals(repoFiles, [ tmpfile ])
+
+    repoFiles = self.hostcleanup.find_repo_files_for_repos(['sd des derft 3.1', 'aass'])
+    self.assertEquals(repoFiles, [ tmpfile ])
+
+    repoFiles = self.hostcleanup.find_repo_files_for_repos(['saas'])
+    self.assertEquals(repoFiles, [])
+
+    repoFiles = self.hostcleanup.find_repo_files_for_repos([''])
+    self.assertEquals(repoFiles, [])
+
+    sys.stdout = sys.__stdout__
+
+
+  @patch.object(HostCleanup.HostCleanup, 'run_os_command')
+  @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
+  @patch.object(HostCleanup.HostCleanup, 'get_alternatives_desc')
+  def test_do_erase_alternatives(self, get_alternatives_desc_mock,
+                    do_erase_dir_silent_mock, run_os_command_mock):
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+    get_alternatives_desc_mock.return_value = 'somepath to alternative\n'
+    run_os_command_mock.return_value = (0, None, None)
+
+    alt_map = {ALT_KEYS[0]:['alt1'], ALT_KEYS[1]:['dir1']}
+
+    self.hostcleanup.do_erase_alternatives(alt_map)
+
+    self.assertTrue(get_alternatives_desc_mock.called)
+    get_alternatives_desc_mock.called_once_with('alt1')
+    self.assertTrue(run_os_command_mock.called)
+    run_os_command_mock.called_once_with(ALT_ERASE_CMD.format('alt1', 'somepath'))
+    self.assertTrue(do_erase_dir_silent_mock.called)
+    do_erase_dir_silent_mock.called_once_with(['dir1'])
+
+    sys.stdout = sys.__stdout__
+
if __name__ == "__main__":
  # Support running this test module directly from the command line.
  unittest.main()


Mime
View raw message