ambari-commits mailing list archives

From dmitriu...@apache.org
Subject [1/7] AMBARI-3810. Unittests for File resource and all its attributes (Eugene Chekanskiy via dlysnichenko)
Date Tue, 19 Nov 2013 18:17:15 GMT
Updated Branches:
  refs/heads/trunk b2571e408 -> e5c6e1130


http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/dummy_files/dummy_current_stack
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/dummy_files/dummy_current_stack b/ambari-agent/src/test/python/dummy_files/dummy_current_stack
deleted file mode 100644
index 7123c53..0000000
--- a/ambari-agent/src/test/python/dummy_files/dummy_current_stack
+++ /dev/null
@@ -1,22 +0,0 @@
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-DATANODE    {"stackName":"HDP","stackVersion":"1.2.0"}
-NAGIOS_SERVER   {"stackName":"HDP","stackVersion":"1.2.1"}
-HCATALOG    {"stackName":"HDP","stackVersion":"1.2.2"}
-GANGLIA_SERVER  {"stackName":"HDP","stackVersion":"1.2.2"}
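
[The deleted fixture maps a component name to the JSON-encoded stack it was installed from, one whitespace-separated record per line, with the Apache license header above the records. A minimal sketch of a parser for this layout — parse_current_stack is a hypothetical helper for illustration, not the agent's actual reader:

import json

def parse_current_stack(path):
    # Each data line holds "<COMPONENT>   <json blob>", e.g.
    #   DATANODE    {"stackName":"HDP","stackVersion":"1.2.0"}
    stacks = {}
    with open(path) as f:
        for line in f:
            parts = line.strip().split(None, 1)
            if len(parts) != 2:
                continue
            component, blob = parts
            try:
                stacks[component] = json.loads(blob)
            except ValueError:
                continue  # skip the license-header prose lines
    return stacks

For the file above, parse_current_stack(path)["DATANODE"]["stackVersion"] would yield "1.2.0".]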

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/dummy_files/test_rco_data.json
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/dummy_files/test_rco_data.json b/ambari-agent/src/test/python/dummy_files/test_rco_data.json
deleted file mode 100644
index 5eedd1a..0000000
--- a/ambari-agent/src/test/python/dummy_files/test_rco_data.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "_comment": "a comment",
-    "general_deps" : {
-      "_comment": "a comment",
-      "SECONDARY_NAMENODE-START": ["DATANODE-START"],
-      "DATANODE-STOP": ["JOBTRACKER-STOP", "TASKTRACKER-STOP", "RESOURCEMANAGER-STOP",
-        "NODEMANAGER-STOP", "HISTORYSERVER-STOP", "HBASE_MASTER-STOP"],
-      "_comment": "a comment",
-      "SECONDARY_NAMENODE-UPGRADE": ["NAMENODE-UPGRADE"]
-    },
-    "optional_hcfs": {
-        "HBASE_MASTER-START": ["PEERSTATUS-START"],
-        "JOBTRACKER-START": ["PEERSTATUS-START"]
-    },
-    "optional_no_hcfs": {
-        "SECONDARY_NAMENODE-START": ["NAMENODE-START"],
-        "RESOURCEMANAGER-START": ["NAMENODE-START", "DATANODE-START"]
-    }
-}
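
[test_rco_data.json models role-command ordering: each key is a role-command such as "SECONDARY_NAMENODE-START", and its value lists the commands that must complete first, with one of the optional_hcfs / optional_no_hcfs blocks merged in. A sketch of how such a map could be loaded and queried — blockers_for is hypothetical, and letting the optional block override same-named keys is an assumed semantic:

import json

def blockers_for(rco_path, role_command, uses_hcfs=False):
    with open(rco_path) as f:
        data = json.load(f)
    # Duplicate "_comment" keys are tolerated by the parser (the last
    # occurrence wins), so a single pop() discards them.
    deps = dict(data['general_deps'])
    deps.update(data['optional_hcfs' if uses_hcfs else 'optional_no_hcfs'])
    deps.pop('_comment', None)
    return deps.get(role_command, [])

With the data above, blockers_for(path, 'SECONDARY_NAMENODE-START') returns ["NAMENODE-START"], the optional_no_hcfs entry having overridden the general one.]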

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/dummy_puppet_output_error.txt
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/dummy_puppet_output_error.txt b/ambari-agent/src/test/python/dummy_puppet_output_error.txt
deleted file mode 100644
index 5efa5af..0000000
--- a/ambari-agent/src/test/python/dummy_puppet_output_error.txt
+++ /dev/null
@@ -1,45 +0,0 @@
-debug: Creating default schedules
-debug: Puppet::Type::User::ProviderDirectoryservice: file /usr/bin/dscl does not exist
-debug: Puppet::Type::User::ProviderUser_role_add: file roledel does not exist
-debug: Puppet::Type::User::ProviderPw: file pw does not exist
-debug: Failed to load library 'ldap' for feature 'ldap'
-debug: Puppet::Type::User::ProviderLdap: feature ldap is missing
-debug: /File[/var/lib/puppet/state/graphs]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/ssl/crl.pem]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/ssl/private_keys]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/state/resources.txt]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/ssl/certificate_requests]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/ssl/certs]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/state/state.yaml]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/client_data]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/facts]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/ssl]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/state]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/ssl/private]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/state/last_run_report.yaml]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/lib]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/ssl/public_keys]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/client_yaml]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/ssl/certs/ca.pem]: Autorequiring File[/var/lib/puppet/ssl/certs]
-debug: /File[/var/lib/puppet/clientbucket]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/state/last_run_summary.yaml]: Autorequiring File[/var/lib/puppet/state]
-debug: Finishing transaction 70171638648540
-debug: Loaded state in 0.00 seconds
-debug: Loaded state in 0.00 seconds
-info: Applying configuration version '1352127563'
-debug: /Schedule[daily]: Skipping device resources because running on a host
-debug: /Schedule[monthly]: Skipping device resources because running on a host
-debug: /Schedule[hourly]: Skipping device resources because running on a host
-debug: /Schedule[never]: Skipping device resources because running on a host
-debug: Exec[command_good](provider=posix): Executing 'wget e432423423xample.com/badurl444111'
-debug: Executing 'wget e432423423xample.com/badurl444111'
-err: /Stage[main]//Exec[command_good]/returns: change from notrun to 0 failed: wget e432423423xample.com/badurl444111 returned 4 instead of one of [0] at /root/puppet-learn/2-bad.pp:5
-debug: /Schedule[weekly]: Skipping device resources because running on a host
-debug: /Schedule[puppet]: Skipping device resources because running on a host
-debug: Finishing transaction 70171639726240
-debug: Storing state
-debug: Stored state in 0.01 seconds
-notice: Finished catalog run in 0.23 seconds
-debug: Finishing transaction 70171638871060
-debug: Received report to process from ambari-dmi
-debug: Processing report from ambari-dmi with processor Puppet::Reports::Store
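
[This fixture exercises error detection: a single "err:" line buried in debug noise is what should mark the run as failed. A reduced sketch of that kind of severity scan — not the agent's real parser, which also copes with colorized output (see the later fixtures):

def puppet_run_failed(output):
    # Puppet tags every line with a severity prefix ("debug:", "info:",
    # "notice:", "warning:", "err:"); any "err:" line fails the run.
    return any(line.lstrip().startswith('err:')
               for line in output.splitlines())]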

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/dummy_puppet_output_error2.txt
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/dummy_puppet_output_error2.txt b/ambari-agent/src/test/python/dummy_puppet_output_error2.txt
deleted file mode 100644
index 19ae347..0000000
--- a/ambari-agent/src/test/python/dummy_puppet_output_error2.txt
+++ /dev/null
@@ -1,40 +0,0 @@
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:57 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:57 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:57 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/returns: Bad connection to FS. command aborted. exception: Call to dev.hortonworks.com/10.0.2.15:8020 failed on connection exception: java.net.ConnectException: Connection refused
-err: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/returns: change from notrun to 0 failed: hadoop --config /etc/hadoop/conf fs -mkdir /mapred returned 255 instead of one of [0] at /var/lib/ambari-agent/puppet/modules/hdp/manifests/init.pp:267
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred::end]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred::end]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -chown mapred /mapred::begin]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -chown mapred /mapred::begin]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -chown mapred /mapred::end]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -chown mapred /mapred::end]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:50 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:51 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:52 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:53 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:54 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:55 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:56 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:57 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:58 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:59 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: Bad connection to FS. command aborted. exception: Call to dev.hortonworks.com/10.0.2.15:8020 failed on connection exception: java.net.ConnectException: Connection refused
-err: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: change from notrun to 0 failed: hadoop --config /etc/hadoop/conf fs -mkdir /tmp returned 255 instead of one of [0] at /var/lib/ambari-agent/puppet/modules/hdp/manifests/init.pp:267
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /tmp::end]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /tmp::end]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system::begin]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system::begin]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system::end]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system::end]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:14 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:15 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:16 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:17 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:18 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:19 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:20 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/dummy_puppet_output_error3.txt
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/dummy_puppet_output_error3.txt b/ambari-agent/src/test/python/dummy_puppet_output_error3.txt
deleted file mode 100644
index 06b6094..0000000
--- a/ambari-agent/src/test/python/dummy_puppet_output_error3.txt
+++ /dev/null
@@ -1,76 +0,0 @@
-ESC[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Packages/Hdp-nagios::Server::Package[nagios-plugins]/Hdp::Package[nagios-plugins]/Hdp:
-:Package::Process_pkg[nagios-plugins]/Package[nagios-plugins-1.4.9]/ensure: createdESC[0mESC[1;35merr: /Stage[2]/Hdp-nagios::Server::Packages/Hdp-nagios::Server::Package[nagios-addons]/Hdp::Package[nagios-addons]/Hdp::Pack
-age::Process_pkg[nagios-addons]/Package[hdp_mon_nagios_addons]/ensure: change from absent to present failed: Execution of '/usr/bin/yum -d 0 -e 0 -y install hdp_mon_nagios_addons' returned 1:
-Error Downloading Packages:
-  hdp_mon_nagios_addons-0.0.2.15-1.noarch: failure: noarch/hdp_mon/hdp_mon_nagios_addons-0.0.2.15-1.noarch.rpm from AMBARI.dev-1.x: [Errno 256] No more mirrors to try.
-ESC[0m
-ESC[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Packages/Hdp-nagios::Server::Package[nagios-addons]/Hdp::Package[nagios-addons]/Hdp::Package::Process_pkg[nagios-addons]/Anchor[hdp::package::nagios-addons::end]: Dependency Package[hdp_mon_nagios_addons] has failures:
-trueESC[0mESC[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Packages/Hdp-nagios::Server::Package[nagios-addons]/Hdp::Package[nagios-addons]/Hdp::
-Package::Process_pkg[nagios-addons]/Anchor[hdp::package::nagios-addons::end]: Skipping because of failed dependenciesESC[0mESC[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Packages/Anchor[hdp-nagios::server::packages::end]: Dependency Package[hdp_mon
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: Finished catalog run in 49.63
\ No newline at end of file
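
[Unlike the earlier fixtures, this one embeds ANSI color escapes (shown both as a literal ESC and in \u001B notation), so a parser must strip the color codes before severity prefixes can match. A minimal sketch of that normalization step, as one assumed approach rather than the agent's actual code:

import re

# Matches SGR color sequences such as "\x1b[0;36m" and "\x1b[0m".
ANSI_COLOR_RE = re.compile(r'\x1b\[[0-9;]*m')

def strip_ansi(text):
    return ANSI_COLOR_RE.sub('', text)

# strip_ansi('\x1b[0;36mnotice: Finished catalog run\x1b[0m')
# -> 'notice: Finished catalog run']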

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/dummy_puppet_output_good.txt
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/dummy_puppet_output_good.txt b/ambari-agent/src/test/python/dummy_puppet_output_good.txt
deleted file mode 100644
index c6ecbbc..0000000
--- a/ambari-agent/src/test/python/dummy_puppet_output_good.txt
+++ /dev/null
@@ -1,47 +0,0 @@
-debug: Creating default schedules
-debug: Puppet::Type::User::ProviderDirectoryservice: file /usr/bin/dscl does not exist
-debug: Puppet::Type::User::ProviderUser_role_add: file roledel does not exist
-debug: Puppet::Type::User::ProviderPw: file pw does not exist
-debug: Failed to load library 'ldap' for feature 'ldap'
-debug: Puppet::Type::User::ProviderLdap: feature ldap is missing
-debug: /File[/var/lib/puppet/ssl/certs/ca.pem]: Autorequiring File[/var/lib/puppet/ssl/certs]
-debug: /File[/var/lib/puppet/ssl/public_keys]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/ssl/crl.pem]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/state/last_run_report.yaml]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/ssl/certificate_requests]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/state/last_run_summary.yaml]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/client_data]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/state]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/ssl/private]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/state/graphs]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/ssl]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/state/state.yaml]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/client_yaml]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/facts]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/ssl/private_keys]: Autorequiring File[/var/lib/puppet/ssl]
-debug: /File[/var/lib/puppet/state/resources.txt]: Autorequiring File[/var/lib/puppet/state]
-debug: /File[/var/lib/puppet/clientbucket]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/lib]: Autorequiring File[/var/lib/puppet]
-debug: /File[/var/lib/puppet/ssl/certs]: Autorequiring File[/var/lib/puppet/ssl]
-debug: Finishing transaction 70060456464420
-debug: Loaded state in 0.00 seconds
-debug: Loaded state in 0.00 seconds
-info: Applying configuration version '1352127399'
-debug: /Schedule[daily]: Skipping device resources because running on a host
-debug: /Schedule[monthly]: Skipping device resources because running on a host
-debug: /Schedule[hourly]: Skipping device resources because running on a host
-debug: /Schedule[never]: Skipping device resources because running on a host
-debug: Exec[command_good](provider=posix): Executing 'wget example.com'
-debug: Executing 'wget example.com'
-notice: /Stage[main]//Exec[command_good]/returns: executed successfully
-debug: /Stage[main]//Exec[command_good]: The container Class[Main] will propagate my refresh event
-debug: /Schedule[weekly]: Skipping device resources because running on a host
-debug: /Schedule[puppet]: Skipping device resources because running on a host
-debug: Class[Main]: The container Stage[main] will propagate my refresh event
-debug: Finishing transaction 70060457541680
-debug: Storing state
-debug: Stored state in 0.01 seconds
-notice: Finished catalog run in 0.59 seconds
-debug: Finishing transaction 70060456663980
-debug: Received report to process from ambari-dmi
-debug: Processing report from ambari-dmi with processor Puppet::Reports::Store
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/examples/ControllerTester.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/examples/ControllerTester.py b/ambari-agent/src/test/python/examples/ControllerTester.py
deleted file mode 100644
index 99c94bd..0000000
--- a/ambari-agent/src/test/python/examples/ControllerTester.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python2.6
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from ambari_agent import Controller
-import pprint, json, os, time, sys
-import tempfile
-from urllib2 import Request, urlopen, URLError
-from mock.mock import patch, MagicMock, call
-from ambari_agent.AmbariConfig  import AmbariConfig
-import Queue
-import logging
-from ambari_agent import PuppetExecutor, PythonExecutor
-
-logger=logging.getLogger()
-
-queue = Queue.Queue()
-
-# Set to True to replace python and puppet calls with mockups
-disable_python_and_puppet = True
-
-agent_version = "1.3.0"
-
-# Values from the list below are returned in responce to agent requests (one per
-# request). When every value has been returned, the last element of list is
-# returned on every subsequent request.
-responces = [
-  """{"responseId":"n",
-  "response":"OK"}""",
-
-  """
-  {
-    "responseId":"n",
-    "restartAgent": "False",
-    "executionCommands":
-      [{
-        "commandId": "31-1",
-        "role" : "DATANODE",
-        "taskId" : 2,
-        "clusterName" : "clusterName",
-        "serviceName" : "HDFS",
-        "roleCommand" : "UPGRADE",
-        "hostname" : "localhost.localdomain",
-        "hostLevelParams": {},
-        "clusterHostInfo": "clusterHostInfo",
-        "configurations": {},
-        "commandType": "EXECUTION_COMMAND",
-        "configurations": {"global" : {}},
-        "roleParams": {},
-        "commandParams" :	{
-          "source_stack_version": "{\\"stackName\\":\\"HDP\\",\\"stackVersion\\":\\"1.2.2\\"}",
-          "target_stack_version": "{\\"stackName\\":\\"HDP\\",\\"stackVersion\\":\\"1.3.0\\"}"
-        },
-        "clusterHostInfo": {
-          "ambari_db_server_host": [
-              "dev.hortonworks.com"
-          ],
-          "ganglia_server_host": [
-              "dev.hortonworks.com"
-          ],
-          "nagios_server_host": [
-              "dev.hortonworks.com"
-          ],
-          "namenode_host": [
-              "dev.hortonworks.com"
-          ],
-          "slave_hosts": [
-              "dev.hortonworks.com"
-          ]
-        }
-      }],
-    "statusCommands":[]
-  }
-  """,
-
-  """
-  {
-    "responseId":"n",
-    "restartAgent": "False",
-    "executionCommands": [],
-    "statusCommands":[]
-  }
-  """
-]
-
-class Int(object):
-  def __init__(self, value):
-    self.value = value
-
-  def inc(self):
-    self.value += 1
-
-  def val(self):
-    return self.value
-
-responseId = Int(0)
-
-def main():
-
-  if disable_python_and_puppet:
-    with patch.object(PuppetExecutor.PuppetExecutor, 'run_manifest') \
-                                          as run_manifest_method:
-      run_manifest_method.side_effect = \
-              lambda command, file, tmpout, tmperr: {
-          'exitcode' : 0,
-          'stdout'   : "Simulated run of pp %s" % file,
-          'stderr'   : 'None'
-        }
-      with patch.object(PythonExecutor.PythonExecutor, 'run_file') \
-                                          as run_file_py_method:
-        run_file_py_method.side_effect = \
-              lambda command, file, tmpoutfile, tmperrfile: {
-          'exitcode' : 0,
-          'stdout'   : "Simulated run of py %s" % file,
-          'stderr'   : 'None'
-        }
-        run_simulation()
-  else:
-    run_simulation()
-
-
-
-def run_simulation():
-  Controller.logger = MagicMock()
-  sendRequest_method = MagicMock()
-
-  tmpfile = tempfile.gettempdir()
-
-  config = AmbariConfig().getConfig()
-  config.set('agent', 'prefix', tmpfile)
-
-  ver_file = os.path.join(tmpfile, "version")
-
-  with open(ver_file, "w") as text_file:
-      text_file.write(agent_version)
-
-  controller = Controller.Controller(config)
-  controller.sendRequest = sendRequest_method
-  controller.netutil.HEARTBEAT_IDDLE_INTERVAL_SEC = 0.1
-  controller.netutil.HEARTBEAT_NOT_IDDLE_INTERVAL_SEC = 0.1
-  controller.range = 1
-
-  for responce in responces:
-    queue.put(responce)
-
-  def send_stub(url, data):
-    logger.info("Controller sends data to %s :" % url)
-    logger.info(pprint.pformat(data))
-    if not queue.empty():
-      responce = queue.get()
-    else:
-      responce = responces[-1]
-      logger.info("There is no predefined responce available, sleeping for 30 sec")
-      time.sleep(30)
-    responce = json.loads(responce)
-    responseId.inc()
-    responce["responseId"] = responseId.val()
-    responce = json.dumps(responce)
-    logger.info("Returning data to Controller:" + responce)
-    return responce
-
-  sendRequest_method.side_effect = send_stub
-
-  logger.setLevel(logging.DEBUG)
-  formatter = logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - \
-        %(message)s")
-  stream_handler = logging.StreamHandler()
-  stream_handler.setFormatter(formatter)
-  logger.addHandler(stream_handler)
-  logger.info("Starting")
-
-  controller.start()
-  controller.actionQueue.IDLE_SLEEP_TIME = 0.1
-  controller.run()
-
-
-if __name__ == '__main__':
-#  s =   """
-#  {
-#    "responseId":"n",
-#    "restartAgent": "False",
-#    "executionCommands":
-#      [{
-#        "commandId": "31-1",
-#        "role" : "DATANODE",
-#        "taskId" : 2,
-#        "clusterName" : "clusterName",
-#        "serviceName" : "HDFS",
-#        "roleCommand" : "UPGRADE",
-#        "hostname" : "localhost.localdomain",
-#        "hostLevelParams": {},
-#        "clusterHostInfo": "clusterHostInfo",
-#        "configurations": {},
-#        "commandType": "EXECUTION_COMMAND",
-#        "configurations": {"global" : {}},
-#        "roleParams": {},
-#        "commandParams" :	{
-#          "commandParams": {"source_stack_version": "{\\"stackName\\":\\"HDP\\",\\"stackVersion\\":\\"1.2.0\\"}", "target_stack_version": "{\\"stackName\\":\\"HDP\\",\\"stackVersion\\":\\"1.2.2\\"}"}
-#        },
-#        "clusterHostInfo": {
-#          "ambari_db_server_host": [
-#              "dev.hortonworks.com"
-#          ],
-#          "ganglia_server_host": [
-#              "dev.hortonworks.com"
-#          ],
-#          "nagios_server_host": [
-#              "dev.hortonworks.com"
-#          ],
-#          "namenode_host": [
-#              "dev.hortonworks.com"
-#          ],
-#          "slave_hosts": [
-#              "dev.hortonworks.com"
-#          ]
-#        }
-#      }],
-#    "statusCommands":[]
-#  }
-#  """
-#  t = json.loads(s)
-#  pprint.pprint(t)
-
-  main()
-
-
-
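
[ControllerTester.py above stubs out the heavy executors with mock's patch.object/side_effect pattern. A self-contained sketch of that pattern — Executor is a stand-in class, not an Ambari type:

from unittest.mock import patch  # the 2013 script used the standalone "mock" package

class Executor(object):
    def run_manifest(self, command, file, tmpout, tmperr):
        raise RuntimeError("would actually shell out to puppet")

with patch.object(Executor, 'run_manifest') as run_mock:
    # side_effect computes a canned result per call. The class attribute
    # is replaced by a plain MagicMock, which is not a descriptor, so no
    # self is bound -- the same reason the lambdas in ControllerTester.py
    # take no self argument.
    run_mock.side_effect = lambda command, file, tmpout, tmperr: {
        'exitcode': 0,
        'stdout': 'Simulated run of pp %s' % file,
        'stderr': 'None',
    }
    print(Executor().run_manifest('UPGRADE', 'site.pp', None, None))
    # -> {'exitcode': 0, 'stdout': 'Simulated run of pp site.pp', 'stderr': 'None'}]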

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/examples/debug_testcase_example.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/examples/debug_testcase_example.py b/ambari-agent/src/test/python/examples/debug_testcase_example.py
deleted file mode 100644
index 74bd817..0000000
--- a/ambari-agent/src/test/python/examples/debug_testcase_example.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python2.6
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from unittest import TestCase
-#from Register import Register
-from ambari_agent.Controller import Controller
-from ambari_agent.Heartbeat import Heartbeat
-from ambari_agent.ActionQueue import ActionQueue
-from ambari_agent import AmbariConfig
-from ambari_agent.NetUtil import NetUtil
-import socket, ConfigParser, logging
-import os, pprint, json, sys, unittest
-from threading import Thread
-import time
-import Queue
-
-logger = logging.getLogger()
-
-class TestController(TestCase):
-
-# This file should be put to ambari-agent/src/main/python/ambari-agent/debug_testcase_example.py.
-# After installing python plugin and adjusting test,
-# it may be run in IntelliJ IDEA debugger
-
-  def setUp(self):
-    #logger.disabled = True
-    self.defaulttimeout = -1.0
-    if hasattr(socket, 'getdefaulttimeout'):
-      # Set the default timeout on sockets
-      self.defaulttimeout = socket.getdefaulttimeout()
-
-  def tearDown(self):
-    if self.defaulttimeout is not None and self.defaulttimeout > 0 and hasattr(socket, 'setdefaulttimeout'):
-      # Set the default timeout on sockets
-      socket.setdefaulttimeout(self.defaulttimeout)
-      #logger.disabled = False
-
-  def test_custom(self):
-    '''
-      test to make sure if we can get a re register command, we register with the server
-    '''
-    pass
-
-def main(argv=None):
-  logger.setLevel(logging.INFO)
-  formatter = logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - \
-      %(message)s")
-  stream_handler = logging.StreamHandler()
-  stream_handler.setFormatter(formatter)
-  logger.addHandler(stream_handler)
-
-  unittest.main()
-
-if __name__ == '__main__':
-  main()
-
-

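With the debug example above removed, an individual suite can still be exercised through the stock unittest runner. A minimal sketch, assuming the test module is importable from the current directory (the module name below is illustrative):

    import unittest
    from TestFileResource import TestFileResource  # hypothetical import path

    if __name__ == '__main__':
      # Build and run a suite for a single TestCase, mirroring the removed
      # example's main() without the custom logging handler.
      suite = unittest.TestLoader().loadTestsFromTestCase(TestFileResource)
      unittest.TextTestRunner(verbosity=2).run(suite)
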
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/resource_management/TestFileResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestFileResource.py b/ambari-agent/src/test/python/resource_management/TestFileResource.py
new file mode 100644
index 0000000..acc0dc9
--- /dev/null
+++ b/ambari-agent/src/test/python/resource_management/TestFileResource.py
@@ -0,0 +1,332 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+
+from unittest import TestCase
+from mock.mock import patch, MagicMock
+import os
+import sys
+from resource_management.core import Environment, Fail
+from resource_management.core.resources import File
+from resource_management.core.system import System
+import resource_management.core.providers.system
+import resource_management
+
+
+@patch.object(System, "platform", new = 'redhat')
+class TestFileResource(TestCase):
+  @patch.object(os.path, "dirname")
+  @patch.object(os.path, "isdir")
+  def test_action_create_dir_exist(self, isdir_mock, dirname_mock):
+    """
+    Tests that the 'create' action fails when the path is an existing directory
+    """
+    isdir_mock.side_effect = [True, False]
+    try:
+      with Environment('/') as env:
+        File('/existent_directory',
+             action='create',
+             mode=0777,
+             content='file-content'
+        )
+      env.run()
+      self.fail("Must fail when directory with name 'path' exist")
+    except Fail as e:
+      self.assertEqual("Applying File['/existent_directory'] failed, directory with name /existent_directory exists",
+                       str(e))
+    self.assertFalse(dirname_mock.called)
+
+  @patch.object(os.path, "dirname")
+  @patch.object(os.path, "isdir")
+  def test_action_create_parent_dir_non_exist(self, isdir_mock, dirname_mock):
+    """
+    Tests that the 'create' action fails when the parent directory
+    of the path doesn't exist
+    """
+    isdir_mock.side_effect = [False, False]
+    dirname_mock.return_value = "/non_existent_directory"
+    try:
+      with Environment('/') as env:
+        File('/non_existent_directory/file',
+             action='create',
+             mode=0777,
+             content='file-content'
+        )
+      env.run()
+      self.fail('Must fail on a non-existent parent directory')
+    except Fail as e:
+      self.assertEqual(
+        "Applying File['/non_existent_directory/file'] failed, parent directory /non_existent_directory doesn't exist",
+        str(e))
+    self.assertTrue(dirname_mock.called)
+
+  @patch("resource_management.core.providers.system._ensure_metadata")
+  @patch("__builtin__.open")
+  @patch.object(os.path, "exists")
+  @patch.object(os.path, "isdir")
+  def test_action_create_non_existent_file(self, isdir_mock, exists_mock, open_mock, ensure_mock):
+    """
+    Tests that the 'create' action creates a previously non-existent file and writes the proper data
+    """
+    isdir_mock.side_effect = [False, True]
+    exists_mock.return_value = False
+    new_file = MagicMock()
+    open_mock.return_value = new_file
+    with Environment('/') as env:
+      File('/directory/file',
+           action='create',
+           mode=0777,
+           content='file-content'
+      )
+    env.run()
+
+    open_mock.assert_called_with('/directory/file', 'wb')
+    new_file.__enter__().write.assert_called_with('file-content')
+    self.assertEqual(open_mock.call_count, 1)
+    self.assertTrue(ensure_mock.called)  # plain flag check; assert_called() silently passes on this mock version
+
+
+  @patch("resource_management.core.providers.system._ensure_metadata")
+  @patch("__builtin__.open")
+  @patch.object(os.path, "exists")
+  @patch.object(os.path, "isdir")
+  def test_action_create_replace(self, isdir_mock, exists_mock, open_mock, ensure_mock):
+    """
+    Tests that the 'create' action rewrites an existing file with new data
+    """
+    isdir_mock.side_effect = [False, True]
+    old_file, new_file = MagicMock(), MagicMock()
+    open_mock.side_effect = [old_file, new_file]
+    old_file.read.return_value = 'old-content'
+    exists_mock.return_value = True
+
+    with Environment('/') as env:
+      File('/directory/file',
+           action='create',
+           mode=0777,
+           backup=False,
+           content='new-content'
+      )
+
+    env.run()
+    self.assertTrue(old_file.read.called)
+    new_file.__enter__().write.assert_called_with('new-content')
+    self.assertTrue(ensure_mock.called)
+    self.assertEqual(open_mock.call_count, 2)
+    open_mock.assert_any_call('/directory/file', 'rb')
+    open_mock.assert_any_call('/directory/file', 'wb')
+
+
+  @patch.object(os, "unlink")
+  @patch.object(os.path, "exists")
+  @patch.object(os.path, "isdir")
+  def test_action_delete_is_directory(self, isdir_mock, exist_mock, unlink_mock):
+    """
+    Tests that the 'delete' action fails when the path is a directory
+    """
+    isdir_mock.return_value = True
+
+    try:
+      with Environment('/') as env:
+        File('/directory/file',
+             action='delete',
+             mode=0777,
+             backup=False,
+             content='new-content'
+        )
+      env.run()
+      self.fail("Should fail when deleting directory")
+    except Fail:
+      pass
+
+    self.assertEqual(isdir_mock.call_count, 1)
+    self.assertEqual(exist_mock.call_count, 0)
+    self.assertEqual(unlink_mock.call_count, 0)
+
+  @patch.object(os, "unlink")
+  @patch.object(os.path, "exists")
+  @patch.object(os.path, "isdir")
+  def test_action_delete(self, isdir_mock, exist_mock, unlink_mock):
+    """
+    Tests that the 'delete' action removes the file
+    """
+    isdir_mock.return_value = False
+
+    with Environment('/') as env:
+      File('/directory/file',
+           action='delete',
+           mode=0777,
+           backup=False,
+           content='new-content'
+      )
+    env.run()
+
+    self.assertEqual(isdir_mock.call_count, 1)
+    self.assertEqual(exist_mock.call_count, 1)
+    self.assertEqual(unlink_mock.call_count, 1)
+
+
+  @patch.object(os.path, "isdir")
+  def test_attribute_path(self, isdir_mock):
+    """
+    Tests 'path' attribute
+    """
+    isdir_mock.side_effect = [True, False]
+
+    try:
+      with Environment('/') as env:
+        File('/existent_directory',
+             action='create',
+             mode=0777,
+             content='file-content'
+        )
+      env.run()
+      self.fail("Must fail when directory with name 'path' exist")
+    except Fail as e:
+      pass
+
+    isdir_mock.assert_called_with('/existent_directory')
+
+  @patch.object(resource_management.core.Environment, "backup_file")
+  @patch("resource_management.core.providers.system._ensure_metadata")
+  @patch("__builtin__.open")
+  @patch.object(os.path, "exists")
+  @patch.object(os.path, "isdir")
+  def test_attribute_backup(self, isdir_mock, exists_mock, open_mock, ensure_mock, backup_file_mock):
+    """
+    Tests 'backup' attribute
+    """
+    isdir_mock.side_effect = [False, True, False, True]
+    open_mock.return_value = MagicMock()
+    exists_mock.return_value = True
+
+    with Environment('/') as env:
+      File('/directory/file',
+           action='create',
+           mode=0777,
+           backup=False,
+           content='new-content'
+      )
+    env.run()
+
+    self.assertEqual(backup_file_mock.call_count, 0)
+
+    with Environment('/') as env:
+      File('/directory/file',
+           action='create',
+           mode=0777,
+           backup=True,
+           content='new-content'
+      )
+    env.run()
+
+    self.assertEqual(backup_file_mock.call_count, 1)
+    backup_file_mock.assert_called_with('/directory/file')
+
+
+  @patch("resource_management.core.providers.system._ensure_metadata")
+  @patch("__builtin__.open")
+  @patch.object(os.path, "exists")
+  @patch.object(os.path, "isdir")
+  def test_attribute_replace(self, isdir_mock, exists_mock, open_mock, ensure_mock):
+    """
+    Tests 'replace' attribute
+    """
+    isdir_mock.side_effect = [False, True]
+    old_file, new_file = MagicMock(), MagicMock()
+    open_mock.side_effect = [old_file, new_file]
+    old_file.read.return_value = 'old-content'
+    exists_mock.return_value = True
+
+    with Environment('/') as env:
+      File('/directory/file',
+           action='create',
+           mode=0777,
+           backup=False,
+           content='new-content',
+           replace=False
+      )
+
+    env.run()
+    self.assertFalse(old_file.read.called)  # replace=False: the file is never opened, read or rewritten
+    self.assertEqual(new_file.__enter__().write.call_count, 0)
+    self.assertTrue(ensure_mock.called)
+    self.assertEqual(open_mock.call_count, 0)
+
+
+  @patch("resource_management.core.providers.system._coerce_uid")
+  @patch("resource_management.core.providers.system._coerce_gid")
+  @patch.object(os, "chown")
+  @patch.object(os, "chmod")
+  @patch.object(os, "stat")
+  @patch("__builtin__.open")
+  @patch.object(os.path, "exists")
+  @patch.object(os.path, "isdir")
+  def test_ensure_metadata(self, isdir_mock, exists_mock, open_mock, stat_mock, chmod_mock, chown_mock, gid_mock,
+                           uid_mock):
+    """
+    Tests that _ensure_metadata changes the owner, group and permissions of the file to the proper values
+    """
+    isdir_mock.side_effect = [False, True, False, True]
+    exists_mock.return_value = False
+
+    class stat():
+      def __init__(self):
+        self.st_mode = 0666
+        self.st_uid = 1
+        self.st_gid = 1
+
+    stat_mock.return_value = stat()
+    gid_mock.return_value = 0
+    uid_mock.return_value = 0
+
+    with Environment('/') as env:
+      File('/directory/file',
+           action='create',
+           mode=0777,
+           content='file-content',
+           owner='root',
+           group='hdfs'
+      )
+    env.run()
+
+    open_mock.assert_called_with('/directory/file', 'wb')
+    self.assertEqual(open_mock.call_count, 1)
+    stat_mock.assert_called_with('/directory/file')
+    self.assertEqual(chmod_mock.call_count, 1)
+    self.assertEqual(chown_mock.call_count, 2)
+    gid_mock.assert_called_once_with('hdfs')
+    uid_mock.assert_called_once_with('root')
+
+    chmod_mock.reset_mock()
+    chown_mock.reset_mock()
+    gid_mock.return_value = 1
+    uid_mock.return_value = 1
+
+    with Environment('/') as env:
+      File('/directory/file',
+           action='create',
+           mode=0777,
+           content='file-content',
+           owner='root',
+           group='hdfs'
+      )
+    env.run()
+
+    self.assertEqual(chmod_mock.call_count, 1)
+    self.assertEqual(chown_mock.call_count, 0)

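Taken together, the tests above exercise the full attribute surface of the new File resource. For orientation, a minimal sketch of the same API as the tests drive it (Environment, File, env.run()); the path, mode and content below are illustrative, not part of the patch:

    from resource_management.core import Environment
    from resource_management.core.resources import File

    # Declaring the resource inside the environment only records it;
    # nothing is applied until the environment runs.
    with Environment('/') as env:
      File('/tmp/example_file',        # hypothetical path
           action='create',
           mode=0644,
           content='example-content',
           owner='root',
           group='hdfs',
           backup=True,                 # keep a copy of the old file
           replace=True)                # overwrite when content differs
    env.run()

One caveat on the assertions: with the mock library in use here, calling an undefined assertion such as assert_called() on a Mock just creates a child mock and always passes, so the checks above are written against the .called flag and call_count instead.
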
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/unitTests.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/unitTests.py b/ambari-agent/src/test/python/unitTests.py
index 79d5ce8..4e6e88d 100644
--- a/ambari-agent/src/test/python/unitTests.py
+++ b/ambari-agent/src/test/python/unitTests.py
@@ -51,7 +51,10 @@ def all_tests_suite():
 
 
   src_dir = os.getcwd()
-  files_list=os.listdir(src_dir)
+  files_list = []
+  for directory in os.listdir(src_dir):
+    if os.path.isdir(directory):
+      files_list += os.listdir(src_dir + os.sep + directory)
   shuffle(files_list)
   tests_list = []
 

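The unitTests.py change above makes test discovery descend one directory level, so suites placed in subdirectories such as resource_management/ are now picked up. The manual separator concatenation is equivalent to os.path.join; the same loop, behavior unchanged:

    files_list = []
    for directory in os.listdir(src_dir):
      if os.path.isdir(directory):
        files_list += os.listdir(os.path.join(src_dir, directory))
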
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/e5c6e113/ambari-agent/src/test/python/winpwd.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/winpwd.py b/ambari-agent/src/test/python/winpwd.py
deleted file mode 100644
index e919bad..0000000
--- a/ambari-agent/src/test/python/winpwd.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python2.6
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-
-
-
-
-def getpwnam(user):
-    print("YESSSSSSSS")
-
-
-
-


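The removed winpwd.py was a placeholder for the POSIX pwd module on Windows, with getpwnam() reduced to a print statement. For reference, a sketch of what a working stand-in might look like (field names follow pwd.struct_passwd; the canned values are illustrative):

    class _PasswdEntry(object):
      # Mirrors the pwd.struct_passwd fields callers typically touch;
      # the values below are placeholders, not real account data.
      def __init__(self, name):
        self.pw_name = name
        self.pw_uid = 0
        self.pw_gid = 0
        self.pw_dir = '/'
        self.pw_shell = ''

    def getpwnam(user):
      # The real pwd.getpwnam raises KeyError for unknown users; a shim
      # with no user database can only return a canned record.
      return _PasswdEntry(user)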