ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From yus...@apache.org
Subject [42/51] [partial] AMBARI-7718. Rebase branch-windows-dev against trunk. (Jayush Luniya and Florian Barca via yusaku)
Date Thu, 16 Oct 2014 20:12:06 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py b/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py
deleted file mode 100644
index 24b497c..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management.libraries.resources.execute_hadoop import *
-from resource_management.libraries.resources.template_config import *
-from resource_management.libraries.resources.xml_config import *
-from resource_management.libraries.resources.properties_file import *
-from resource_management.libraries.resources.repository import *
-from resource_management.libraries.resources.monitor_webserver import *
-from resource_management.libraries.resources.hdfs_directory import *
-from resource_management.libraries.resources.copy_from_local import *
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/copy_from_local.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/copy_from_local.py b/ambari-agent/src/main/python/resource_management/libraries/resources/copy_from_local.py
deleted file mode 100644
index 328d9c2..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/copy_from_local.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["CopyFromLocal"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class CopyFromLocal(Resource):
-  action = ForcedListArgument(default="run")
-
-  path = ResourceArgument(default=lambda obj: obj.name)
-  dest_dir = ResourceArgument(required=True)
-  owner = ResourceArgument(required=True)
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  kinnit_if_needed = ResourceArgument(default='')
-  hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
-  hdfs_user = ResourceArgument(default='hdfs')
-
-  actions = Resource.actions + ["run"]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/execute_hadoop.py b/ambari-agent/src/main/python/resource_management/libraries/resources/execute_hadoop.py
deleted file mode 100644
index 94daf5b..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/execute_hadoop.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["ExecuteHadoop"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class ExecuteHadoop(Resource):
-  action = ForcedListArgument(default="run")
-  command = ResourceArgument(default=lambda obj: obj.name)
-  kinit_override = BooleanArgument(default=False)
-  tries = ResourceArgument(default=1)
-  try_sleep = ResourceArgument(default=0) # seconds
-  user = ResourceArgument()
-  logoutput = BooleanArgument(default=False)
-  principal = ResourceArgument(default=lambda obj: obj.user)
-  
-  conf_dir = ResourceArgument()
-  
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  
-  actions = Resource.actions + ["run"]
-  

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py b/ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py
deleted file mode 100644
index 63d9cc2..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["HdfsDirectory"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class HdfsDirectory(Resource):
-  action = ForcedListArgument()
-
-  dir_name = ResourceArgument(default=lambda obj: obj.name)
-  owner = ResourceArgument()
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  recursive_chown = BooleanArgument(default=False)
-  recursive_chmod = BooleanArgument(default=False)
-
-  conf_dir = ResourceArgument()
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  hdfs_user = ResourceArgument()
-
-  #action 'create' immediately creates all pending directory in efficient manner
-  #action 'create_delayed' add directory to list of pending directories
-  actions = Resource.actions + ["create","create_delayed"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/monitor_webserver.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/monitor_webserver.py b/ambari-agent/src/main/python/resource_management/libraries/resources/monitor_webserver.py
deleted file mode 100644
index dfd1174..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/monitor_webserver.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["MonitorWebserver"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-
-class MonitorWebserver(Resource):
-  action = ForcedListArgument(default=lambda obj: [obj.name])
-  actions = Resource.actions + ["start", "stop", "restart"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/properties_file.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/properties_file.py b/ambari-agent/src/main/python/resource_management/libraries/resources/properties_file.py
deleted file mode 100644
index 0e5afb4..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/properties_file.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["PropertiesFile"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class PropertiesFile(Resource):
-  action = ForcedListArgument(default="create")
-  filename = ResourceArgument(default=lambda obj: obj.name)
-
-  properties = ResourceArgument()
-  dir = ResourceArgument()
-
-  mode = ResourceArgument()
-  owner = ResourceArgument()
-  group = ResourceArgument()
-
-  actions = Resource.actions + ["create"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/repository.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/repository.py b/ambari-agent/src/main/python/resource_management/libraries/resources/repository.py
deleted file mode 100644
index 48a347a..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/repository.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["Repository"]
-
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class Repository(Resource):
-  action = ForcedListArgument(default="create")
-  repo_id = ResourceArgument(default=lambda obj: obj.name)
-  base_url = ResourceArgument()
-  mirror_list = ResourceArgument()
-  repo_file_name = ResourceArgument()
-  repo_template = ResourceArgument()
-  components = ForcedListArgument(default=[]) # ubuntu specific
-
-  actions = Resource.actions + ["create","remove"]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/template_config.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/template_config.py b/ambari-agent/src/main/python/resource_management/libraries/resources/template_config.py
deleted file mode 100644
index 8ce2a00..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/template_config.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["TemplateConfig"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class TemplateConfig(Resource):
-  action = ForcedListArgument(default="create")
-  path = ResourceArgument(default=lambda obj: obj.name)
-  mode = ResourceArgument()
-  owner = ResourceArgument()
-  group = ResourceArgument()
-  template_tag = ResourceArgument()
-  extra_imports = ResourceArgument(default=[])
-
-  actions = Resource.actions + ["create"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/resources/xml_config.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/xml_config.py b/ambari-agent/src/main/python/resource_management/libraries/resources/xml_config.py
deleted file mode 100644
index 2b8ff6f..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/xml_config.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["XmlConfig"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class XmlConfig(Resource):
-  action = ForcedListArgument(default="create")
-  filename = ResourceArgument(default=lambda obj: obj.name)
-  
-  configurations = ResourceArgument()
-  conf_dir = ResourceArgument()
-  
-  mode = ResourceArgument()
-  owner = ResourceArgument()
-  group = ResourceArgument()
-
-  actions = Resource.actions + ["create"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/script/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/script/__init__.py b/ambari-agent/src/main/python/resource_management/libraries/script/__init__.py
deleted file mode 100644
index 72d3aaf..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/script/__init__.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management.libraries.script.script import *
-from resource_management.libraries.script.hook import *
-from resource_management.libraries.script.config_dictionary import *
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/script/config_dictionary.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/script/config_dictionary.py b/ambari-agent/src/main/python/resource_management/libraries/script/config_dictionary.py
deleted file mode 100644
index 453c546..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/script/config_dictionary.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from resource_management.core.exceptions import Fail
-
-class ConfigDictionary(dict):
-  """
-  Immutable config dictionary
-  """
-  
-  def __init__(self, dictionary):
-    """
-    Recursively turn dict to ConfigDictionary
-    """
-    for k, v in dictionary.iteritems():
-      if isinstance(v, dict):
-        dictionary[k] = ConfigDictionary(v)
-        
-    super(ConfigDictionary, self).__init__(dictionary)
-
-  def __setitem__(self, name, value):
-    raise Fail("Configuration dictionary is immutable!")
-
-  def __getitem__(self, name):
-    """
-    - use Python types
-    - enable lazy failure for unknown configs. 
-    """
-    try:
-      value = super(ConfigDictionary, self).__getitem__(name)
-    except KeyError:
-      return UnknownConfiguration(name)
-      
-    
-    if value == "true":
-      value = True
-    elif value == "false":
-      value = False
-    else: 
-      try:
-        value = int(value)
-      except (ValueError, TypeError):
-        try:
-          value =  float(value)
-        except (ValueError, TypeError):
-          pass
-    
-    return value
-  
-  
-class UnknownConfiguration():
-  """
-  Lazy failing for unknown configs.
-  """
-  def __init__(self, name):
-    self.name = name
-   
-  def __getattr__(self, name):
-    raise Fail("Configuration parameter '"+self.name+"' was not found in configurations dictionary!")
-  
-  def __getitem__(self, name):
-    """
-    Allow [] 
-    """
-    return self
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/script/hook.py b/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
deleted file mode 100644
index 5c8eafc..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env ambari-python-wrap
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-__all__ = ["Hook"]
-
-from resource_management.libraries.script import Script
-import subprocess
-import sys
-
-class Hook(Script):
-  """
-  Executes a hook for acommand for custom service. stdout and stderr are written to
-  tmpoutfile and to tmperrfile respectively.
-  """
-
-  HOOK_METHOD_NAME = "hook" # This method is always executed at hooks
-
-
-  def choose_method_to_execute(self, command_name):
-    """
-    Changes logics of resolving method name
-    """
-    return super(Hook, self).choose_method_to_execute(self.HOOK_METHOD_NAME)
-
-
-  def run_custom_hook(self, command):
-    """
-    Runs custom hook
-    """
-    args = sys.argv
-    
-    #Hook script to run
-    args[0] = args[0].replace('before-'+args[1], command)
-    args[0] = args[0].replace('after-'+args[1], command)
-    
-    #Hook script base directory
-    args[3] = args[3].replace('before-'+args[1], command)
-    args[3] = args[3].replace('after-'+args[1], command)
-    
-    args[1] = command.split("-")[1]
-
-
-    cmd = [sys.executable]
-    cmd.extend(args)
-
-    if subprocess.call(cmd) != 0:
-      self.fail_with_error("Error: Unable to run the custom hook script " +
-                           cmd.__str__())
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/script/script.py b/ambari-agent/src/main/python/resource_management/libraries/script/script.py
deleted file mode 100644
index 9ce8b1b..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/script/script.py
+++ /dev/null
@@ -1,251 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-__all__ = ["Script"]
-
-import os
-import sys
-import json
-import logging
-import platform
-
-from resource_management.core.environment import Environment
-from resource_management.core.exceptions import Fail, ClientComponentHasNoStatus, ComponentIsNotRunning
-from resource_management.core.resources.packaging import Package
-from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration
-from resource_management.libraries.functions.install_hdp_msi import install_windows_msi
-from resource_management.libraries.functions.reload_windows_env import reload_windows_env
-USAGE = """Usage: {0} <COMMAND> <JSON_CONFIG> <BASEDIR> <STROUTPUT> <LOGGING_LEVEL>
-
-<COMMAND> command type (INSTALL/CONFIGURE/START/STOP/SERVICE_CHECK...)
-<JSON_CONFIG> path to command json file. Ex: /var/lib/ambari-agent/data/command-2.json
-<BASEDIR> path to service metadata dir. Ex: /var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HDFS
-<STROUTPUT> path to file with structured command output (file will be created). Ex:/tmp/my.txt
-<LOGGING_LEVEL> log level for stdout. Ex:DEBUG,INFO
-"""
-
-_PASSWORD_MAP = {"/configurations/hadoop-env/hadoop.user.name":"/configurations/hadoop-env/hadoop.user.password"}
-
-def get_path_form_configuration(name, configuration):
-  subdicts = filter(None, name.split('/'))
-
-  for x in subdicts:
-    if x in configuration:
-      configuration = configuration[x]
-    else:
-      return None
-
-  return configuration
-
-class Script(object):
-  """
-  Executes a command for custom service. stdout and stderr are written to
-  tmpoutfile and to tmperrfile respectively.
-  Script instances share configuration as a class parameter and therefore
-  different Script instances can not be used from different threads at
-  the same time within a single python process
-
-  Accepted command line arguments mapping:
-  1 command type (START/STOP/...)
-  2 path to command json file
-  3 path to service metadata dir (Directory "package" inside service directory)
-  4 path to file with structured command output (file will be created)
-  """
-  structuredOut = {}
-
-  def put_structured_out(self, sout):
-    Script.structuredOut.update(sout)
-    try:
-      with open(self.stroutfile, 'w') as fp:
-        json.dump(Script.structuredOut, fp)
-    except IOError:
-      Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
-
-  def execute(self):
-    """
-    Sets up logging;
-    Parses command parameters and executes method relevant to command type
-    """
-    # set up logging (two separate loggers for stderr and stdout with different loglevels)
-    logger = logging.getLogger('resource_management')
-    logger.setLevel(logging.DEBUG)
-    formatter = logging.Formatter('%(asctime)s - %(message)s')
-    chout = logging.StreamHandler(sys.stdout)
-    chout.setLevel(logging.INFO)
-    chout.setFormatter(formatter)
-    cherr = logging.StreamHandler(sys.stderr)
-    cherr.setLevel(logging.ERROR)
-    cherr.setFormatter(formatter)
-    logger.addHandler(cherr)
-    logger.addHandler(chout)
-    
-    # parse arguments
-    if len(sys.argv) < 6: 
-     logger.error("Script expects at least 5 arguments")
-     print USAGE.format(os.path.basename(sys.argv[0])) # print to stdout
-     sys.exit(1)
-    
-    command_name = str.lower(sys.argv[1])
-    command_data_file = sys.argv[2]
-    basedir = sys.argv[3]
-    self.stroutfile = sys.argv[4]
-    logging_level = sys.argv[5]
-    
-    logging_level_str = logging._levelNames[logging_level]
-    chout.setLevel(logging_level_str)
-    logger.setLevel(logging_level_str)
-
-    # on windows we need to reload some of env variables manually because there is no default paths for configs(like
-    # /etc/something/conf on linux. When this env vars created by one of the Script execution, they can not be updated
-    # in agent, so other Script executions will not be able to access to new env variables
-    if platform.system() == "Windows":
-      reload_windows_env()
-
-    try:
-      with open(command_data_file, "r") as f:
-        pass
-        Script.config = ConfigDictionary(json.load(f))
-        #load passwords here(used on windows to impersonate different users)
-        Script.passwords = {}
-        for k, v in _PASSWORD_MAP.iteritems():
-          if get_path_form_configuration(k,Script.config) and get_path_form_configuration(v,Script.config ):
-            Script.passwords[get_path_form_configuration(k,Script.config)] = get_path_form_configuration(v,Script.config)
-
-    except IOError:
-      logger.exception("Can not read json file with command parameters: ")
-      sys.exit(1)
-    # Run class method depending on a command type
-    try:
-      method = self.choose_method_to_execute(command_name)
-      with Environment(basedir) as env:
-        method(env)
-    except ClientComponentHasNoStatus or ComponentIsNotRunning:
-      # Support of component status checks.
-      # Non-zero exit code is interpreted as an INSTALLED status of a component
-      sys.exit(1)
-    except Fail:
-      logger.exception("Error while executing command '{0}':".format(command_name))
-      sys.exit(1)
-
-
-  def choose_method_to_execute(self, command_name):
-    """
-    Returns a callable object that should be executed for a given command.
-    """
-    self_methods = dir(self)
-    if not command_name in self_methods:
-      raise Fail("Script '{0}' has no method '{1}'".format(sys.argv[0], command_name))
-    method = getattr(self, command_name)
-    return method
-
-
-  @staticmethod
-  def get_config():
-    """
-    HACK. Uses static field to store configuration. This is a workaround for
-    "circular dependency" issue when importing params.py file and passing to
-     it a configuration instance.
-    """
-    return Script.config
-
-  @staticmethod
-  def get_password(user):
-    return Script.passwords[user]
-
-  def install(self, env):
-    """
-    Default implementation of install command is to install all packages
-    from a list, received from the server.
-    Feel free to override install() method with your implementation. It
-    usually makes sense to call install_packages() manually in this case
-    """
-    self.install_packages(env)
-
-
-  def install_packages(self, env, exclude_packages=[]):
-    """
-    List of packages that are required< by service is received from the server
-    as a command parameter. The method installs all packages
-    from this list
-    """
-    config = self.get_config()
-    if platform.system() == "Windows":
-      install_windows_msi(os.path.join(config['hostLevelParams']['jdk_location'], "hdp.msi"),
-                          config["hostLevelParams"]["agentCacheDir"], "hdp.msi", self.get_password("hadoop"))
-    else:
-      try:
-        package_list_str = config['hostLevelParams']['package_list']
-        if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
-          package_list = json.loads(package_list_str)
-          for package in package_list:
-            if not package['name'] in exclude_packages:
-              name = package['name']
-              Package(name)
-      except KeyError:
-        pass  # No reason to worry
-
-        # RepoInstaller.remove_repos(config)
-
-
-
-  def fail_with_error(self, message):
-    """
-    Prints error message and exits with non-zero exit code
-    """
-    print("Error: " + message)
-    sys.stderr.write("Error: " + message)
-    sys.exit(1)
-
-  def start(self, env):
-    """
-    To be overridden by subclasses
-    """
-    self.fail_with_error('start method isn\'t implemented')
-
-  def stop(self, env):
-    """
-    To be overridden by subclasses
-    """
-    self.fail_with_error('stop method isn\'t implemented')
-
-  def restart(self, env):
-    """
-    Default implementation of restart command is to call stop and start methods
-    Feel free to override restart() method with your implementation.
-    For client components we call install
-    """
-    config = self.get_config()
-    componentCategory = None
-    try :
-      componentCategory = config['roleParams']['component_category']
-    except KeyError:
-      pass
-
-    if componentCategory and componentCategory.strip().lower() == 'CLIENT'.lower():
-      self.install(env)
-    else:
-      self.stop(env)
-      self.start(env)
-
-  def configure(self, env):
-    """
-    To be overridden by subclasses
-    """
-    self.fail_with_error('configure method isn\'t implemented')

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/main/python/resource_management/libraries/script/script.py.orig
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/script/script.py.orig b/ambari-agent/src/main/python/resource_management/libraries/script/script.py.orig
deleted file mode 100644
index df666f9..0000000
--- a/ambari-agent/src/main/python/resource_management/libraries/script/script.py.orig
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-__all__ = ["Script"]
-
-import os
-import sys
-import json
-import logging
-import platform
-
-from resource_management.core.environment import Environment
-from resource_management.core.exceptions import Fail, ClientComponentHasNoStatus, ComponentIsNotRunning
-from resource_management.core.resources.packaging import Package
-from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration
-from resource_management.libraries.functions.install_hdp_msi import install_windows_msi
-
-
-USAGE = """Usage: {0} <COMMAND> <JSON_CONFIG> <BASEDIR> <STROUTPUT> <LOGGING_LEVEL>
-
-<COMMAND> command type (INSTALL/CONFIGURE/START/STOP/SERVICE_CHECK...)
-<JSON_CONFIG> path to command json file. Ex: /var/lib/ambari-agent/data/command-2.json
-<BASEDIR> path to service metadata dir. Ex: /var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HDFS
-<STROUTPUT> path to file with structured command output (file will be created). Ex:/tmp/my.txt
-<LOGGING_LEVEL> log level for stdout. Ex:DEBUG,INFO
-"""
-
-_PASSWORD_MAP = {"/configurations/hadoop-env/hadoop.user.name":"/configurations/hadoop-env/hadoop.user.password"}
-
-def get_path_form_configuration(name, configuration):
-  subdicts = filter(None, name.split('/'))
-
-  for x in subdicts:
-    if x in configuration:
-      configuration = configuration[x]
-    else:
-      return None
-
-  return configuration
-
-class Script(object):
-  """
-  Executes a command for custom service. stdout and stderr are written to
-  tmpoutfile and to tmperrfile respectively.
-  Script instances share configuration as a class parameter and therefore
-  different Script instances can not be used from different threads at
-  the same time within a single python process
-
-  Accepted command line arguments mapping:
-  1 command type (START/STOP/...)
-  2 path to command json file
-  3 path to service metadata dir (Directory "package" inside service directory)
-  4 path to file with structured command output (file will be created)
-  """
-  structuredOut = {}
-
-  def put_structured_out(self, sout):
-    Script.structuredOut.update(sout)
-    try:
-      with open(self.stroutfile, 'w') as fp:
-        json.dump(Script.structuredOut, fp)
-    except IOError:
-      Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
-
-  def execute(self):
-    """
-    Sets up logging;
-    Parses command parameters and executes method relevant to command type
-    """
-    # set up logging (two separate loggers for stderr and stdout with different loglevels)
-    logger = logging.getLogger('resource_management')
-    logger.setLevel(logging.DEBUG)
-    formatter = logging.Formatter('%(asctime)s - %(message)s')
-    chout = logging.StreamHandler(sys.stdout)
-    chout.setLevel(logging.INFO)
-    chout.setFormatter(formatter)
-    cherr = logging.StreamHandler(sys.stderr)
-    cherr.setLevel(logging.ERROR)
-    cherr.setFormatter(formatter)
-    logger.addHandler(cherr)
-    logger.addHandler(chout)
-    
-    # parse arguments
-    if len(sys.argv) < 6: 
-     logger.error("Script expects at least 5 arguments")
-     print USAGE.format(os.path.basename(sys.argv[0])) # print to stdout
-     sys.exit(1)
-    
-    command_name = str.lower(sys.argv[1])
-    command_data_file = sys.argv[2]
-    basedir = sys.argv[3]
-    self.stroutfile = sys.argv[4]
-    logging_level = sys.argv[5]
-    
-    logging_level_str = logging._levelNames[logging_level]
-    chout.setLevel(logging_level_str)
-    logger.setLevel(logging_level_str)
-      
-    try:
-      with open(command_data_file, "r") as f:
-        pass
-        Script.config = ConfigDictionary(json.load(f))
-        #load passwords here(used on windows to impersonate different users)
-        Script.passwords = {}
-        for k, v in _PASSWORD_MAP.iteritems():
-          if get_path_form_configuration(k,Script.config) and get_path_form_configuration(v,Script.config ):
-            Script.passwords[get_path_form_configuration(k,Script.config)] = get_path_form_configuration(v,Script.config)
-
-    except IOError:
-      logger.exception("Can not read json file with command parameters: ")
-      sys.exit(1)
-    # Run class method depending on a command type
-    try:
-      method = self.choose_method_to_execute(command_name)
-      with Environment(basedir) as env:
-        method(env)
-    except ClientComponentHasNoStatus or ComponentIsNotRunning:
-      # Support of component status checks.
-      # Non-zero exit code is interpreted as an INSTALLED status of a component
-      sys.exit(1)
-    except Fail:
-      logger.exception("Error while executing command '{0}':".format(command_name))
-      sys.exit(1)
-
-
-  def choose_method_to_execute(self, command_name):
-    """
-    Returns a callable object that should be executed for a given command.
-    """
-    self_methods = dir(self)
-    if not command_name in self_methods:
-      raise Fail("Script '{0}' has no method '{1}'".format(sys.argv[0], command_name))
-    method = getattr(self, command_name)
-    return method
-
-
-  @staticmethod
-  def get_config():
-    """
-    HACK. Uses static field to store configuration. This is a workaround for
-    "circular dependency" issue when importing params.py file and passing to
-     it a configuration instance.
-    """
-    return Script.config
-
-  @staticmethod
-  def get_password(user):
-    return Script.passwords[user]
-
-  def install(self, env):
-    """
-    Default implementation of install command is to install all packages
-    from a list, received from the server.
-    Feel free to override install() method with your implementation. It
-    usually makes sense to call install_packages() manually in this case
-    """
-    self.install_packages(env)
-
-
-  def install_packages(self, env, exclude_packages=[]):
-    """
-    List of packages that are required< by service is received from the server
-    as a command parameter. The method installs all packages
-    from this list
-    """
-    config = self.get_config()
-    if platform.system() == "Windows":
-      install_windows_msi(os.path.join(config['hostLevelParams']['jdk_location'], "hdp.msi"),
-<<<<<<< HEAD
-                          config["hostLevelParams"]["agentCacheDir"], "hdp.msi")
-=======
-                          config["hostLevelParams"]["agentCacheDir"], "hdp.msi", self.get_password("hadoop"))
->>>>>>> da12124c4425f3f87e15c06795c51cbde97beacb
-    else:
-      try:
-        package_list_str = config['hostLevelParams']['package_list']
-        if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
-          package_list = json.loads(package_list_str)
-          for package in package_list:
-            if not package['name'] in exclude_packages:
-              name = package['name']
-              Package(name)
-      except KeyError:
-        pass  # No reason to worry
-
-        # RepoInstaller.remove_repos(config)
-
-
-
-  def fail_with_error(self, message):
-    """
-    Prints error message and exits with non-zero exit code
-    """
-    print("Error: " + message)
-    sys.stderr.write("Error: " + message)
-    sys.exit(1)
-
-  def start(self, env):
-    """
-    To be overridden by subclasses
-    """
-    self.fail_with_error('start method isn\'t implemented')
-
-  def stop(self, env):
-    """
-    To be overridden by subclasses
-    """
-    self.fail_with_error('stop method isn\'t implemented')
-
-  def restart(self, env):
-    """
-    Default implementation of restart command is to call stop and start methods
-    Feel free to override restart() method with your implementation.
-    For client components we call install
-    """
-    config = self.get_config()
-    componentCategory = None
-    try :
-      componentCategory = config['roleParams']['component_category']
-    except KeyError:
-      pass
-
-    if componentCategory and componentCategory.strip().lower() == 'CLIENT'.lower():
-      self.install(env)
-    else:
-      self.stop(env)
-      self.start(env)
-
-  def configure(self, env):
-    """
-    To be overridden by subclasses
-    """
-    self.fail_with_error('configure method isn\'t implemented')

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/packages/windows.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/src/packages/windows.xml b/ambari-agent/src/packages/windows.xml
index b24c73e..1abca20 100644
--- a/ambari-agent/src/packages/windows.xml
+++ b/ambari-agent/src/packages/windows.xml
@@ -32,7 +32,7 @@
       <outputDirectory>/sbin/ambari_agent</outputDirectory>
     </fileSet>
     <fileSet>
-      <directory>src/main/python/resource_management</directory>
+      <directory>${project.basedir}/../ambari-common/src/main/python/resource_management</directory>
       <outputDirectory>/sbin/resource_management</outputDirectory>
     </fileSet>
     <fileSet>
@@ -40,8 +40,8 @@
       <outputDirectory>/sbin/ambari_commons</outputDirectory>
     </fileSet>
     <fileSet>
-      <directory>${project.basedir}/../ambari-common/src/main/python/jinja2/jinja2</directory>
-      <outputDirectory>/sbin/jinja2</outputDirectory>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</directory>
+      <outputDirectory>/sbin/ambari_jinja2</outputDirectory>
     </fileSet>
     <fileSet>
       <directory>${project.basedir}/conf/windows</directory>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
index 897493a..216f8c8 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
@@ -27,6 +27,7 @@ import os, errno, time, pprint, tempfile, threading, json
 import StringIO
 import sys
 from threading import Thread
+import copy
 
 from mock.mock import patch, MagicMock, call
 from ambari_agent.StackVersionsFileHandler import StackVersionsFileHandler
@@ -34,13 +35,11 @@ from ambari_agent.CustomServiceOrchestrator import CustomServiceOrchestrator
 from ambari_agent.PythonExecutor import PythonExecutor
 from ambari_agent.CommandStatusDict import CommandStatusDict
 from ambari_agent.ActualConfigHandler import ActualConfigHandler
+from FileCache import FileCache
 
 
 class TestActionQueue(TestCase):
-
   def setUp(self):
-    out = StringIO.StringIO()
-    sys.stdout = out
     # save original open() method for later use
     self.original_open = open
 
@@ -143,6 +142,19 @@ class TestActionQueue(TestCase):
     'serviceName': u'HDFS',
     'configurations':{'global' : {}},
     'configurationTags':{'global' : { 'tag': 'v123' }},
+    'hostLevelParams':{'custom_command': 'RESTART', 'clientsToUpdateConfigs': []}
+  }
+
+  datanode_restart_command_no_clients_update = {
+    'commandType': 'EXECUTION_COMMAND',
+    'role': u'DATANODE',
+    'roleCommand': u'CUSTOM_COMMAND',
+    'commandId': '1-1',
+    'taskId': 9,
+    'clusterName': u'cc',
+    'serviceName': u'HDFS',
+    'configurations':{'global' : {}},
+    'configurationTags':{'global' : { 'tag': 'v123' }},
     'hostLevelParams':{'custom_command': 'RESTART'}
   }
 
@@ -155,6 +167,49 @@ class TestActionQueue(TestCase):
     'hostLevelParams': {}
   }
 
+  background_command = {
+    'commandType': 'BACKGROUND_EXECUTION_COMMAND',
+    'role': 'NAMENODE',
+    'roleCommand': 'CUSTOM_COMMAND',
+    'commandId': '1-1',
+    'taskId': 19,
+    'clusterName': 'c1',
+    'serviceName': 'HDFS',
+    'configurations':{'global' : {}},
+    'configurationTags':{'global' : { 'tag': 'v123' }},
+    'hostLevelParams':{'custom_command': 'REBALANCE_HDFS'},
+    'commandParams' :  {
+      'script_type' : 'PYTHON',
+      'script' : 'script.py',
+      'command_timeout' : '600',
+      'jdk_location' : '.',
+      'service_package_folder' : '.'
+      }
+  }
+  cancel_background_command = {
+    'commandType': 'EXECUTION_COMMAND',
+    'role': 'NAMENODE',
+    'roleCommand': 'ACTIONEXECUTE',
+    'commandId': '1-1',
+    'taskId': 20,
+    'clusterName': 'c1',
+    'serviceName': 'HDFS',
+    'configurations':{'global' : {}},
+    'configurationTags':{'global' : {}},
+    'hostLevelParams':{},
+    'commandParams' :  {
+      'script_type' : 'PYTHON',
+      'script' : 'cancel_background_task.py',
+      'before_system_hook_function' : 'fetch_bg_pid_by_taskid',
+      'jdk_location' : '.',
+      'command_timeout' : '600',
+      'service_package_folder' : '.',
+      'cancel_policy': 'SIGKILL',
+      'cancel_task_id': "19",
+      }
+  }
+
+
   @patch.object(ActionQueue, "process_command")
   @patch.object(Queue, "get")
   @patch.object(CustomServiceOrchestrator, "__init__")
@@ -178,7 +233,9 @@ class TestActionQueue(TestCase):
   def test_process_command(self, execute_status_command_mock,
                            execute_command_mock, print_exc_mock):
     dummy_controller = MagicMock()
-    actionQueue = ActionQueue(AmbariConfig().getConfig(), dummy_controller)
+    config = AmbariConfig()
+    config.set('agent', 'tolerate_download_failures', "true")
+    actionQueue = ActionQueue(config, dummy_controller)
     execution_command = {
       'commandType' : ActionQueue.EXECUTION_COMMAND,
     }
@@ -243,7 +300,7 @@ class TestActionQueue(TestCase):
         return self.original_open(file, mode)
     open_mock.side_effect = open_side_effect
 
-    config = AmbariConfig().getConfig()
+    config = AmbariConfig()
     tempdir = tempfile.gettempdir()
     config.set('agent', 'prefix', tempdir)
     config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
@@ -429,6 +486,48 @@ class TestActionQueue(TestCase):
     self.assertEqual(len(report['reports']), 1)
     self.assertEqual(expected, report['reports'][0])
 
+  @patch.object(ActualConfigHandler, "write_client_components")
+  @patch.object(CustomServiceOrchestrator, "runCommand")
+  @patch("CommandStatusDict.CommandStatusDict")
+  @patch.object(ActionQueue, "status_update_callback")
+  def test_store_configuration_tags_no_clients(self, status_update_callback_mock,
+                                    command_status_dict_mock,
+                                    cso_runCommand_mock, write_client_components_mock):
+    custom_service_orchestrator_execution_result_dict = {
+      'stdout': 'out',
+      'stderr': 'stderr',
+      'structuredOut' : '',
+      'exitcode' : 0
+    }
+    cso_runCommand_mock.return_value = custom_service_orchestrator_execution_result_dict
+
+    config = AmbariConfig().getConfig()
+    tempdir = tempfile.gettempdir()
+    config.set('agent', 'prefix', tempdir)
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
+    actionQueue.execute_command(self.datanode_restart_command_no_clients_update)
+    report = actionQueue.result()
+    expected = {'status': 'COMPLETED',
+                'configurationTags': {'global': {'tag': 'v123'}},
+                'stderr': 'stderr',
+                'stdout': 'out',
+                'clusterName': u'cc',
+                'structuredOut': '""',
+                'roleCommand': u'CUSTOM_COMMAND',
+                'serviceName': u'HDFS',
+                'role': u'DATANODE',
+                'actionId': '1-1',
+                'taskId': 9,
+                'customCommand': 'RESTART',
+                'exitCode': 0}
+    # Agent caches configurationTags if custom_command RESTART completed
+    self.assertEqual(len(report['reports']), 1)
+    self.assertEqual(expected, report['reports'][0])
+    self.assertFalse(write_client_components_mock.called)
+
   @patch.object(ActionQueue, "status_update_callback")
   @patch.object(StackVersionsFileHandler, "read_stack_version")
   @patch.object(CustomServiceOrchestrator, "requestComponentStatus")
@@ -485,3 +584,162 @@ class TestActionQueue(TestCase):
     self.assertTrue(requestComponentStatus_mock.called)
     self.assertEqual(len(report['componentStatus']), 1)
     self.assertTrue(report['componentStatus'][0].has_key('alerts'))
+
+  @patch.object(ActionQueue, "process_command")
+  @patch.object(Queue, "get")
+  @patch.object(CustomServiceOrchestrator, "__init__")
+  def test_reset_queue(self, CustomServiceOrchestrator_mock,
+                                get_mock, process_command_mock):
+    CustomServiceOrchestrator_mock.return_value = None
+    dummy_controller = MagicMock()
+    config = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
+    actionQueue.start()
+    actionQueue.put([self.datanode_install_command, self.hbase_install_command])
+    self.assertEqual(2, actionQueue.commandQueue.qsize())
+    actionQueue.reset()
+    self.assertTrue(actionQueue.commandQueue.empty())
+    time.sleep(0.1)
+    actionQueue.stop()
+    actionQueue.join()
+    self.assertEqual(actionQueue.stopped(), True, 'Action queue is not stopped.')
+
+  @patch.object(ActionQueue, "process_command")
+  @patch.object(Queue, "get")
+  @patch.object(CustomServiceOrchestrator, "__init__")
+  def test_cancel(self, CustomServiceOrchestrator_mock,
+                       get_mock, process_command_mock):
+    CustomServiceOrchestrator_mock.return_value = None
+    dummy_controller = MagicMock()
+    config = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
+    actionQueue.start()
+    actionQueue.put([self.datanode_install_command, self.hbase_install_command])
+    self.assertEqual(2, actionQueue.commandQueue.qsize())
+    actionQueue.reset()
+    self.assertTrue(actionQueue.commandQueue.empty())
+    time.sleep(0.1)
+    actionQueue.stop()
+    actionQueue.join()
+    self.assertEqual(actionQueue.stopped(), True, 'Action queue is not stopped.')
+
+  @patch.object(StackVersionsFileHandler, "read_stack_version")
+  @patch.object(CustomServiceOrchestrator, "runCommand")
+  @patch.object(CustomServiceOrchestrator, "__init__")
+  def test_execute_background_command(self, CustomServiceOrchestrator_mock,
+                                  runCommand_mock, read_stack_version_mock
+                                  ):
+    CustomServiceOrchestrator_mock.return_value = None
+    CustomServiceOrchestrator.runCommand.return_value = {'exitcode' : 0,
+                                                         'stdout': 'out-11',
+                                                         'stderr' : 'err-13'}
+
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(AmbariConfig().getConfig(), dummy_controller)
+
+    execute_command = copy.deepcopy(self.background_command)
+    actionQueue.put([execute_command])
+    actionQueue.processBackgroundQueueSafeEmpty();
+    actionQueue.processStatusCommandQueueSafeEmpty();
+
+    #assert that python execturor start
+    self.assertTrue(runCommand_mock.called)
+    runningCommand = actionQueue.commandStatuses.current_state.get(execute_command['taskId'])
+    self.assertTrue(runningCommand is not None)
+    self.assertEqual(runningCommand[1]['status'], ActionQueue.IN_PROGRESS_STATUS)
+
+    report = actionQueue.result()
+    self.assertEqual(len(report['reports']),1)
+
+  @patch.object(CustomServiceOrchestrator, "resolve_script_path")
+  @patch.object(StackVersionsFileHandler, "read_stack_version")
+  def test_execute_python_executor(self, read_stack_version_mock, resolve_script_path_mock):
+
+    dummy_controller = MagicMock()
+    cfg = AmbariConfig().getConfig()
+    cfg.set('agent', 'tolerate_download_failures', 'true')
+    cfg.set('agent', 'prefix', '.')
+    cfg.set('agent', 'cache_dir', 'background_tasks')
+
+    actionQueue = ActionQueue(cfg, dummy_controller)
+    patch_output_file(actionQueue.customServiceOrchestrator.python_executor)
+    actionQueue.customServiceOrchestrator.dump_command_to_json = MagicMock()
+
+    result = {}
+    lock = threading.RLock()
+    complete_done = threading.Condition(lock)
+
+    def command_complete_w(process_condenced_result, handle):
+      with lock:
+        result['command_complete'] = {'condenced_result' : copy.copy(process_condenced_result),
+                                      'handle' : copy.copy(handle),
+                                      'command_status' : actionQueue.commandStatuses.get_command_status(handle.command['taskId'])
+                                      }
+        complete_done.notifyAll()
+
+    actionQueue.on_background_command_complete_callback = wraped(actionQueue.on_background_command_complete_callback,None, command_complete_w)
+    actionQueue.put([self.background_command])
+    actionQueue.processBackgroundQueueSafeEmpty();
+    actionQueue.processStatusCommandQueueSafeEmpty();
+
+    with lock:
+      complete_done.wait(.1)
+
+      finished_status = result['command_complete']['command_status']
+      self.assertEqual(finished_status['status'], ActionQueue.COMPLETED_STATUS)
+      self.assertEqual(finished_status['stdout'], 'process_out')
+      self.assertEqual(finished_status['stderr'], 'process_err')
+      self.assertEqual(finished_status['exitCode'], 0)
+
+
+    runningCommand = actionQueue.commandStatuses.current_state.get(self.background_command['taskId'])
+    self.assertTrue(runningCommand is not None)
+
+    report = actionQueue.result()
+    self.assertEqual(len(report['reports']),1)
+    self.assertEqual(report['reports'][0]['stdout'],'process_out')
+#    self.assertEqual(report['reports'][0]['structuredOut'],'{"a": "b."}')
+
+
+
+  cancel_background_command = {
+    "commandType":"CANCEL_COMMAND",
+    "role":"AMBARI_SERVER_ACTION",
+    "roleCommand":"ABORT",
+    "commandId":"2--1",
+    "taskId":20,
+    "clusterName":"c1",
+    "serviceName":"",
+    "hostname":"c6401",
+    "roleParams":{
+      "cancelTaskIdTargets":"13,14"
+    },
+  }
+
+def patch_output_file(pythonExecutor):
+  def windows_py(command, tmpout, tmperr):
+    proc = MagicMock()
+    proc.pid = 33
+    proc.returncode = 0
+    with tmpout:
+      tmpout.write('process_out')
+    with tmperr:
+      tmperr.write('process_err')
+    return proc
+  def open_subporcess_files_win(fout, ferr, f):
+    return MagicMock(), MagicMock()
+  def read_result_from_files(out_path, err_path, structured_out_path):
+    return 'process_out', 'process_err', '{"a": "b."}'
+  pythonExecutor.launch_python_subprocess = windows_py
+  pythonExecutor.open_subporcess_files = open_subporcess_files_win
+  pythonExecutor.read_result_from_files = read_result_from_files
+
+def wraped(func, before = None, after = None):
+    def wrapper(*args, **kwargs):
+      if(before is not None):
+        before(*args, **kwargs)
+      ret =  func(*args, **kwargs)
+      if(after is not None):
+        after(*args, **kwargs)
+      return ret
+    return wrapper

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py b/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
index ca56350..c659a03 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
@@ -34,8 +34,8 @@ class TestActualConfigHandler(TestCase):
   def setUp(self):
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE", "HCATALOG",
-      "KERBEROS", "TEMPLETON", "HIVE", "WEBHCAT",
+      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
     ]
@@ -108,7 +108,7 @@ class TestActualConfigHandler(TestCase):
        "componentName" : "HIVE_METASTORE"},
       {"serviceName" : "HIVE",
        "componentName" : "MYSQL_SERVER"},
-      {"serviceName" : "WEBHCAT",
+      {"serviceName" : "HIVE",
        "componentName" : "WEBHCAT_SERVER"},
       {"serviceName" : "YARN",
        "componentName" : "RESOURCEMANAGER"},
@@ -193,6 +193,7 @@ class TestActualConfigHandler(TestCase):
 
     tags1 = { "global": "version1", "core-site": "version2" }
     tags2 = { "global": "version33", "core-site": "version33" }
+    clientsToUpdateConfigs1 = ["*"]
     handler = ActualConfigHandler(config, {})
     handler.write_actual_component('HDFS_CLIENT', tags1)
     handler.write_actual_component('HBASE_CLIENT', tags1)
@@ -201,7 +202,7 @@ class TestActualConfigHandler(TestCase):
     handler.write_actual_component('DATANODE', tags2)
     self.assertEquals(tags2, handler.read_actual_component('DATANODE'))
     self.assertEquals(tags1, handler.read_actual_component('HDFS_CLIENT'))
-    handler.write_client_components('HDFS', tags2)
+    handler.write_client_components('HDFS', tags2, clientsToUpdateConfigs1)
     self.assertEquals(tags2, handler.read_actual_component('HDFS_CLIENT'))
     self.assertEquals(tags1, handler.read_actual_component('HBASE_CLIENT'))
 
@@ -218,17 +219,37 @@ class TestActualConfigHandler(TestCase):
     tags0 = {"global": "version0", "core-site": "version0"}
     tags1 = {"global": "version1", "core-site": "version2"}
     tags2 = {"global": "version33", "core-site": "version33"}
+    clientsToUpdateConfigs1 = ["HDFS_CLIENT","HBASE_CLIENT"]
     configTags = {'HDFS_CLIENT': tags0, 'HBASE_CLIENT': tags1}
     handler = ActualConfigHandler(config, configTags)
     self.assertEquals(tags0, handler.read_actual_component('HDFS_CLIENT'))
     self.assertEquals(tags1, handler.read_actual_component('HBASE_CLIENT'))
-    handler.write_client_components('HDFS', tags2)
+    handler.write_client_components('HDFS', tags2, clientsToUpdateConfigs1)
     self.assertEquals(tags2, handler.read_actual_component('HDFS_CLIENT'))
     self.assertEquals(tags1, handler.read_actual_component('HBASE_CLIENT'))
     self.assertTrue(write_file_mock.called)
     self.assertEqual(1, write_file_mock.call_count)
 
   @patch.object(ActualConfigHandler, "write_file")
+  def test_write_empty_client_components(self, write_file_mock):
+    config = AmbariConfig().getConfig()
+    tmpdir = tempfile.gettempdir()
+    config.set('agent', 'prefix', tmpdir)
+
+    tags0 = {"global": "version0", "core-site": "version0"}
+    tags1 = {"global": "version1", "core-site": "version2"}
+    tags2 = {"global": "version33", "core-site": "version33"}
+    clientsToUpdateConfigs1 = []
+    configTags = {'HDFS_CLIENT': tags0, 'HBASE_CLIENT': tags1}
+    handler = ActualConfigHandler(config, configTags)
+    self.assertEquals(tags0, handler.read_actual_component('HDFS_CLIENT'))
+    self.assertEquals(tags1, handler.read_actual_component('HBASE_CLIENT'))
+    handler.write_client_components('HDFS', tags2, clientsToUpdateConfigs1)
+    self.assertEquals(tags0, handler.read_actual_component('HDFS_CLIENT'))
+    self.assertEquals(tags1, handler.read_actual_component('HBASE_CLIENT'))
+    self.assertFalse(write_file_mock.called)
+
+  @patch.object(ActualConfigHandler, "write_file")
   @patch.object(ActualConfigHandler, "read_file")
   def test_read_actual_component_inmemory(self, read_file_mock, write_file_mock):
     config = AmbariConfig().getConfig()

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestAlerts.py b/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
new file mode 100644
index 0000000..6c5cb4d
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
@@ -0,0 +1,357 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import os
+import sys
+from ambari_agent.AlertSchedulerHandler import AlertSchedulerHandler
+from ambari_agent.alerts.collector import AlertCollector
+from ambari_agent.alerts.metric_alert import MetricAlert
+from ambari_agent.alerts.port_alert import PortAlert
+from ambari_agent.alerts.script_alert import ScriptAlert
+from ambari_agent.apscheduler.scheduler import Scheduler
+from mock.mock import patch
+from unittest import TestCase
+
+class TestAlerts(TestCase):
+
+  def setUp(self):
+    pass
+
+
+  def tearDown(self):
+    sys.stdout = sys.__stdout__
+
+
+  @patch.object(Scheduler, "add_interval_job")
+  @patch.object(Scheduler, "start")
+  def test_start(self, aps_add_interval_job_mock, aps_start_mock):
+    test_file_path = os.path.join('ambari_agent', 'dummy_files')
+    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
+
+    ash = AlertSchedulerHandler(test_file_path, test_stack_path)
+    ash.start()
+
+    self.assertTrue(aps_add_interval_job_mock.called)
+    self.assertTrue(aps_start_mock.called)
+
+
+  def test_port_alert(self):
+    json = { "name": "namenode_process",
+      "service": "HDFS",
+      "component": "NAMENODE",
+      "label": "NameNode process",
+      "interval": 6,
+      "scope": "host",
+      "enabled": True,
+      "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
+      "source": {
+        "type": "PORT",
+        "uri": "{{hdfs-site/my-key}}",
+        "default_port": 50070,
+        "reporting": {
+          "ok": {
+            "text": "TCP OK - {0:.4f} response time on port {1}"
+          },
+          "critical": {
+            "text": "Could not load process info: {0}"
+          }
+        }
+      }
+    }
+
+    collector = AlertCollector()
+
+    pa = PortAlert(json, json['source'])
+    pa.set_helpers(collector, {'hdfs-site/my-key': 'value1'})
+    self.assertEquals(6, pa.interval())
+
+    res = pa.collect()
+
+
+  def test_port_alert_no_sub(self):
+    json = { "name": "namenode_process",
+      "service": "HDFS",
+      "component": "NAMENODE",
+      "label": "NameNode process",
+      "interval": 6,
+      "scope": "host",
+      "enabled": True,
+      "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
+      "source": {
+        "type": "PORT",
+        "uri": "http://c6401.ambari.apache.org",
+        "default_port": 50070,
+        "reporting": {
+          "ok": {
+            "text": "TCP OK - {0:.4f} response time on port {1}"
+          },
+          "critical": {
+            "text": "Could not load process info: {0}"
+          }
+        }
+      }
+    }
+
+    pa = PortAlert(json, json['source'])
+    pa.set_helpers(AlertCollector(), '')
+    self.assertEquals('http://c6401.ambari.apache.org', pa.uri)
+
+    res = pa.collect()
+
+
+  def test_script_alert(self):
+    json = {
+      "name": "namenode_process",
+      "service": "HDFS",
+      "component": "NAMENODE",
+      "label": "NameNode process",
+      "interval": 6,
+      "scope": "host",
+      "enabled": True,
+      "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
+      "source": {
+        "type": "SCRIPT",
+        "path": "test_script.py",
+        "reporting": {
+          "ok": {
+            "text": "TCP OK - {0:.4f} response time on port {1}"
+          },
+          "critical": {
+            "text": "Could not load process info: {0}"
+          }
+        }
+      }
+    }
+
+    # normally set by AlertSchedulerHandler
+    json['source']['stacks_dir'] = os.path.join('ambari_agent', 'dummy_files')
+
+    collector = AlertCollector()
+    sa = ScriptAlert(json, json['source'])
+    sa.set_helpers(collector, '')
+    self.assertEquals(json['source']['path'], sa.path)
+    self.assertEquals(json['source']['stacks_dir'], sa.stacks_dir)
+
+    sa.collect()
+
+    self.assertEquals('WARNING', collector.alerts()[0]['state'])
+    self.assertEquals('all is not well', collector.alerts()[0]['text'])
+
+
+  @patch.object(MetricAlert, "_load_jmx")
+  def test_metric_alert(self, ma_load_jmx_mock):
+    json = {
+      "name": "cpu_check",
+      "service": "HDFS",
+      "component": "NAMENODE",
+      "label": "NameNode process",
+      "interval": 6,
+      "scope": "host",
+      "enabled": True,
+      "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
+      "source": {
+        "type": "METRIC",
+        "uri": "http://myurl:8633",
+        "jmx": {
+          "property_list": [
+            "someJmxObject/value",
+            "someOtherJmxObject/value"
+          ],
+          "value": "{0} * 100 + 123"
+        },
+        "reporting": {
+          "ok": {
+            "text": "ok_arr: {0} {1} {2}",
+          },
+          "warning": {
+            "text": "",
+            "value": 13
+          },
+          "critical": {
+            "text": "crit_arr: {0} {1} {2}",
+            "value": 72
+          }
+        }
+      }
+    }
+
+    ma_load_jmx_mock.return_value = [1, 3]
+
+    collector = AlertCollector()
+    ma = MetricAlert(json, json['source'])
+    ma.set_helpers(collector, '')
+    ma.collect()
+
+    self.assertEquals('CRITICAL', collector.alerts()[0]['state'])
+    self.assertEquals('crit_arr: 1 3 223', collector.alerts()[0]['text'])
+
+    del json['source']['jmx']['value']
+    collector = AlertCollector()
+    ma = MetricAlert(json, json['source'])
+    ma.set_helpers(collector, '')
+    ma.collect()
+
+    self.assertEquals('OK', collector.alerts()[0]['state'])
+    self.assertEquals('ok_arr: 1 3 None', collector.alerts()[0]['text'])
+
+
+  def test_reschedule(self):
+    test_file_path = os.path.join('ambari_agent', 'dummy_files')
+    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
+
+    ash = AlertSchedulerHandler(test_file_path, test_stack_path)
+    ash.start()
+
+    self.assertEquals(1, ash.get_job_count())
+    ash.reschedule()
+    self.assertEquals(1, ash.get_job_count())
+
+
+  def test_alert_collector_purge(self):
+    json = { "name": "namenode_process",
+      "service": "HDFS",
+      "component": "NAMENODE",
+      "label": "NameNode process",
+      "interval": 6,
+      "scope": "host",
+      "enabled": True,
+      "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
+      "source": {
+        "type": "PORT",
+        "uri": "{{hdfs-site/my-key}}",
+        "default_port": 50070,
+        "reporting": {
+          "ok": {
+            "text": "TCP OK - {0:.4f} response time on port {1}"
+          },
+          "critical": {
+            "text": "Could not load process info: {0}"
+          }
+        }
+      }
+    }
+
+    collector = AlertCollector()
+
+    pa = PortAlert(json, json['source'])
+    pa.set_helpers(collector, {'hdfs-site/my-key': 'value1'})
+    self.assertEquals(6, pa.interval())
+
+    res = pa.collect()
+
+    self.assertIsNotNone(collector.alerts()[0])
+    self.assertEquals('CRITICAL', collector.alerts()[0]['state'])
+
+    collector.remove_by_uuid('c1f73191-4481-4435-8dae-fd380e4c0be1')
+    self.assertEquals(0,len(collector.alerts()))
+
+
+  def test_disabled_definitions(self):
+    test_file_path = os.path.join('ambari_agent', 'dummy_files')
+    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
+
+    ash = AlertSchedulerHandler(test_file_path, test_stack_path)
+    ash.start()
+
+    self.assertEquals(1, ash.get_job_count())
+
+    json = { "name": "namenode_process",
+      "service": "HDFS",
+      "component": "NAMENODE",
+      "label": "NameNode process",
+      "interval": 6,
+      "scope": "host",
+      "enabled": True,
+      "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
+      "source": {
+        "type": "PORT",
+        "uri": "{{hdfs-site/my-key}}",
+        "default_port": 50070,
+        "reporting": {
+          "ok": {
+            "text": "TCP OK - {0:.4f} response time on port {1}"
+          },
+          "critical": {
+            "text": "Could not load process info: {0}"
+          }
+        }
+      }
+    }
+
+    pa = PortAlert(json, json['source'])
+    ash.schedule_definition(pa)
+
+    self.assertEquals(2, ash.get_job_count())
+
+    json['enabled'] = False
+    pa = PortAlert(json, json['source'])
+    ash.schedule_definition(pa)
+
+    # verify disabled alert not scheduled
+    self.assertEquals(2, ash.get_job_count())
+
+    json['enabled'] = True
+    pa = PortAlert(json, json['source'])
+    ash.schedule_definition(pa)
+
+    # verify enabled alert was scheduled
+    self.assertEquals(3, ash.get_job_count())
+
+  def test_immediate_alert(self):
+    test_file_path = os.path.join('ambari_agent', 'dummy_files')
+    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
+
+    ash = AlertSchedulerHandler(test_file_path, test_stack_path)
+    ash.start()
+
+    self.assertEquals(1, ash.get_job_count())
+    self.assertEquals(0, len(ash._collector.alerts()))
+
+    execution_commands = [ {
+        "clusterName": "c1",
+        "hostName": "c6401.ambari.apache.org",
+        "alertDefinition": {
+          "name": "namenode_process",
+          "service": "HDFS",
+          "component": "NAMENODE",
+          "label": "NameNode process",
+          "interval": 6,
+          "scope": "host",
+          "enabled": True,
+          "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
+          "source": {
+            "type": "PORT",
+            "uri": "{{hdfs-site/my-key}}",
+            "default_port": 50070,
+            "reporting": {
+              "ok": {
+                "text": "TCP OK - {0:.4f} response time on port {1}"
+              },
+              "critical": {
+                "text": "Could not load process info: {0}"
+              }
+            }
+          }
+        }
+      } ]
+
+    # execute the alert immediately and verify that the collector has the result
+    ash.execute_alert(execution_commands)
+    self.assertEquals(1, len(ash._collector.alerts()))
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/test/python/ambari_agent/TestCertGeneration.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCertGeneration.py b/ambari-agent/src/test/python/ambari_agent/TestCertGeneration.py
index 5216447..c724c31 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCertGeneration.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCertGeneration.py
@@ -29,20 +29,19 @@ from ambari_agent import AmbariConfig
 class TestCertGeneration(TestCase):
   def setUp(self):
     self.tmpdir = tempfile.mkdtemp()
-    config = ConfigParser.RawConfigParser()
-    config.add_section('server')
+    config = AmbariConfig.AmbariConfig()
+    #config.add_section('server')
     config.set('server', 'hostname', 'example.com')
     config.set('server', 'url_port', '777')
-    config.add_section('security')
+    #config.add_section('security')
     config.set('security', 'keysdir', self.tmpdir)
     config.set('security', 'server_crt', 'ca.crt')
     self.certMan = CertificateManager(config)
-    
+
   def test_generation(self):
     self.certMan.genAgentCrtReq()
     self.assertTrue(os.path.exists(self.certMan.getAgentKeyName()))
     self.assertTrue(os.path.exists(self.certMan.getAgentCrtReqName()))
   def tearDown(self):
     shutil.rmtree(self.tmpdir)
-    
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9213dcca/ambari-agent/src/test/python/ambari_agent/TestController.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestController.py b/ambari-agent/src/test/python/ambari_agent/TestController.py
index dd92e06..72b0cea 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestController.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestController.py
@@ -30,7 +30,7 @@ from threading import Event
 import json
 
 with patch("platform.linux_distribution", return_value = ('Suse','11','Final')):
-  from ambari_agent import Controller, ActionQueue
+  from ambari_agent import Controller, ActionQueue, Register
   from ambari_agent import hostname
   from ambari_agent.Controller import AGENT_AUTO_RESTART_EXIT_CODE
   from ambari_commons import OSCheck
@@ -53,7 +53,8 @@ class TestController(unittest.TestCase):
 
 
     config = MagicMock()
-    config.get.return_value = "something"
+    #config.get.return_value = "something"
+    config.get.return_value = "5"
 
     self.controller = Controller.Controller(config)
     self.controller.netutil.MINIMUM_INTERVAL_BETWEEN_HEARTBEATS = 0.1
@@ -235,6 +236,8 @@ class TestController(unittest.TestCase):
     self.controller.registerWithServer = registerWithServer
     heartbeatWithServer = MagicMock(name="heartbeatWithServer")
     self.controller.heartbeatWithServer = heartbeatWithServer
+    actionQueue = MagicMock(name="actionQueue")
+    self.controller.actionQueue = actionQueue
 
     Controller.Controller.__sendRequest__ = MagicMock(side_effect=Exception())
 
@@ -244,9 +247,9 @@ class TestController(unittest.TestCase):
     heartbeatWithServer.assert_called_once_with()
 
     self.controller.registerWithServer =\
-    Controller.Controller.registerWithServer
+      Controller.Controller.registerWithServer
     self.controller.heartbeatWithServer =\
-    Controller.Controller.registerWithServer
+      Controller.Controller.registerWithServer
 
   @patch("time.sleep")
   def test_registerAndHeartbeat(self, sleepMock):
@@ -256,6 +259,8 @@ class TestController(unittest.TestCase):
     self.controller.registerWithServer = registerWithServer
     heartbeatWithServer = MagicMock(name="heartbeatWithServer")
     self.controller.heartbeatWithServer = heartbeatWithServer
+    actionQueue = MagicMock(name="actionQueue")
+    self.controller.actionQueue = actionQueue
 
     listener1 = MagicMock()
     listener2 = MagicMock()
@@ -281,6 +286,8 @@ class TestController(unittest.TestCase):
     self.controller.registerWithServer = registerWithServer
     heartbeatWithServer = MagicMock(name="heartbeatWithServer")
     self.controller.heartbeatWithServer = heartbeatWithServer
+    actionQueue = MagicMock(name="actionQueue")
+    self.controller.actionQueue = actionQueue
 
     self.controller.isRegistered = True
     self.controller.registerAndHeartbeat()
@@ -293,6 +300,33 @@ class TestController(unittest.TestCase):
       Controller.Controller.registerWithServer
 
 
+  @patch("time.sleep")
+  @patch.object(Controller.Controller, "sendRequest")
+  def test_registerWithIOErrors(self, sendRequestMock, sleepMock):
+    # Check that server continues to heartbeat after connection errors
+    registerMock = MagicMock(name="Register")
+    registerMock.build.return_value = {}
+    actionQueue = MagicMock()
+    actionQueue.isIdle.return_value = True
+    self.controller.actionQueue = actionQueue
+    self.controller.register = registerMock
+    self.controller.responseId = 1
+    self.controller.TEST_IOERROR_COUNTER = 1
+    self.controller.isRegistered = False
+    def util_throw_IOErrors(*args, **kwargs):
+      """
+      Throws IOErrors 10 times and then stops heartbeats/registrations
+      """
+      if self.controller.TEST_IOERROR_COUNTER == 10:
+        self.controller.isRegistered = True
+      self.controller.TEST_IOERROR_COUNTER += 1
+      raise IOError("Sample error")
+    actionQueue.isIdle.return_value = False
+    sendRequestMock.side_effect = util_throw_IOErrors
+    self.controller.registerWithServer()
+    self.assertTrue(sendRequestMock.call_count > 5)
+
+
   @patch("os._exit")
   def test_restartAgent(self, os_exit_mock):
 
@@ -324,18 +358,22 @@ class TestController(unittest.TestCase):
       {'Content-Type': 'application/json'})
 
     conMock.request.return_value = '{invalid_object}'
-    actual = self.controller.sendRequest(url, data)
-    expected = {'exitstatus': 1, 'log': ('Response parsing failed! Request data: ' + data
-                                         + '; Response: {invalid_object}')}
-    self.assertEqual(actual, expected)
+
+    try:
+      self.controller.sendRequest(url, data)
+      self.fail("Should throw exception!")
+    except IOError, e: # Expected
+      self.assertEquals('Response parsing failed! Request data: ' + data +
+                        '; Response: {invalid_object}', str(e))
 
     exceptionMessage = "Connection Refused"
     conMock.request.side_effect = Exception(exceptionMessage)
-    actual = self.controller.sendRequest(url, data)
-    expected = {'exitstatus': 1, 'log': 'Request to ' + url + ' failed due to ' + exceptionMessage}
-
-    self.assertEqual(actual, expected)
-
+    try:
+      self.controller.sendRequest(url, data)
+      self.fail("Should throw exception!")
+    except IOError, e: # Expected
+      self.assertEquals('Request to ' + url + ' failed due to ' +
+                        exceptionMessage, str(e))
 
 
   @patch.object(threading._Event, "wait")
@@ -368,6 +406,7 @@ class TestController(unittest.TestCase):
 
     # one successful request, after stop
     self.controller.actionQueue = actionQueue
+    self.controller.alert_scheduler_handler = MagicMock()
     self.controller.heartbeatWithServer()
     self.assertTrue(sendRequest.called)
 
@@ -476,6 +515,27 @@ class TestController(unittest.TestCase):
     sleepMock.assert_called_with(
       self.controller.netutil.MINIMUM_INTERVAL_BETWEEN_HEARTBEATS)
 
+    # Check that server continues to heartbeat after connection errors
+    self.controller.responseId = 1
+    self.controller.TEST_IOERROR_COUNTER = 1
+    sendRequest.reset()
+    def util_throw_IOErrors(*args, **kwargs):
+      """
+      Throws IOErrors 10 times and then stops heartbeats/registrations
+      """
+      if self.controller.TEST_IOERROR_COUNTER == 10:
+        self.controller.DEBUG_STOP_HEARTBEATING = True
+      self.controller.TEST_IOERROR_COUNTER += 1
+      raise IOError("Sample error")
+    self.controller.DEBUG_STOP_HEARTBEATING = False
+    actionQueue.isIdle.return_value = False
+    sendRequest.side_effect = util_throw_IOErrors
+    self.controller.heartbeatWithServer()
+    self.assertTrue(sendRequest.call_count > 5)
+
+    sleepMock.assert_called_with(
+      self.controller.netutil.MINIMUM_INTERVAL_BETWEEN_HEARTBEATS)
+
     sys.stdout = sys.__stdout__
     self.controller.sendRequest = Controller.Controller.sendRequest
     self.controller.sendRequest = Controller.Controller.addToQueue


Mime
View raw message