airflow-commits mailing list archives

From bo...@apache.org
Subject [1/4] incubator-airflow git commit: [AIRFLOW-1604] Rename logger to log
Date Tue, 19 Sep 2017 08:18:44 GMT
Repository: incubator-airflow
Updated Branches:
  refs/heads/v1-9-test 14e6d7bf4 -> af4050847


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/airflow/utils/log/logging_mixin.py
----------------------------------------------------------------------
diff --git a/airflow/utils/log/logging_mixin.py b/airflow/utils/log/logging_mixin.py
new file mode 100644
index 0000000..a3aad5b
--- /dev/null
+++ b/airflow/utils/log/logging_mixin.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import logging
+import warnings
+from builtins import object
+
+
+class LoggingMixin(object):
+    """
+    Convenience super-class that provides a logger configured with the class name
+    """
+
+    # We want to deprecate the logger property in Airflow 2.0
+    # The log property is the de facto standard in most programming languages
+    @property
+    def logger(self):
+        warnings.warn(
+            'Initializing logger for {} using logger(), which will '
+            'be replaced by .log in Airflow 2.0'.format(
+                self.__class__.__module__ + '.' + self.__class__.__name__
+            ),
+            DeprecationWarning
+        )
+        return self.log
+
+    @property
+    def log(self):
+        try:
+            return self._log
+        except AttributeError:
+            self._log = logging.root.getChild(
+                self.__class__.__module__ + '.' + self.__class__.__name__
+            )
+            return self._log
+
+    def set_log_contexts(self, task_instance):
+        """
+        Set the context for all handlers of the current logger.
+        """
+        for handler in self.log.handlers:
+            try:
+                handler.set_context(task_instance)
+            except AttributeError:
+                pass

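The new mixin above gives any subclass a lazily created log property, registered as a child of the root logger and named <module>.<class>; the old logger spelling keeps working but raises a DeprecationWarning. A minimal sketch of the behaviour (MyHook and fetch are hypothetical names, not part of this commit):

    import logging

    from airflow.utils.log.logging_mixin import LoggingMixin

    class MyHook(LoggingMixin):
        def fetch(self):
            # First access creates and caches a logger named
            # '<this module>.MyHook' under the root logger.
            self.log.info('fetching')
            # Deprecated alias; warns until its removal in Airflow 2.0.
            self.logger.info('same logger, old name')

    logging.basicConfig(level=logging.INFO)
    MyHook().fetch()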
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/airflow/utils/log/s3_task_handler.py
----------------------------------------------------------------------
diff --git a/airflow/utils/log/s3_task_handler.py b/airflow/utils/log/s3_task_handler.py
index 71fc149..2ed97a1 100644
--- a/airflow/utils/log/s3_task_handler.py
+++ b/airflow/utils/log/s3_task_handler.py
@@ -14,7 +14,7 @@
 import os
 
 from airflow import configuration
-from airflow.utils.log.LoggingMixin import LoggingMixin
+from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.log.file_task_handler import FileTaskHandler
 
 
@@ -36,7 +36,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
             from airflow.hooks.S3_hook import S3Hook
             return S3Hook(remote_conn_id)
         except:
-            self.logger.error(
+            self.log.error(
                 'Could not create an S3Hook with connection id "%s". '
                 'Please make sure that airflow[s3] is installed and '
                 'the S3 connection exists.', remote_conn_id
@@ -132,7 +132,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
             # return error if needed
             if return_error:
                 msg = 'Could not read logs from {}'.format(remote_log_location)
-                self.logger.error(msg)
+                self.log.error(msg)
                 return msg
 
     def s3_write(self, log, remote_log_location, append=True):
@@ -159,4 +159,4 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
                 encrypt=configuration.getboolean('core', 'ENCRYPT_S3_LOGS'),
             )
         except:
-            self.logger.error('Could not write logs to %s', remote_log_location)
+            self.log.error('Could not write logs to %s', remote_log_location)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/airflow/utils/timeout.py
----------------------------------------------------------------------
diff --git a/airflow/utils/timeout.py b/airflow/utils/timeout.py
index 53f2149..e0b3f96 100644
--- a/airflow/utils/timeout.py
+++ b/airflow/utils/timeout.py
@@ -20,7 +20,7 @@ from __future__ import unicode_literals
 import signal
 
 from airflow.exceptions import AirflowTaskTimeout
-from airflow.utils.log.LoggingMixin import LoggingMixin
+from airflow.utils.log.logging_mixin import LoggingMixin
 
 
 class timeout(LoggingMixin):
@@ -33,7 +33,7 @@ class timeout(LoggingMixin):
         self.error_message = error_message
 
     def handle_timeout(self, signum, frame):
-        self.logger.error("Process timed out")
+        self.log.error("Process timed out")
         raise AirflowTaskTimeout(self.error_message)
 
     def __enter__(self):
@@ -41,12 +41,12 @@ class timeout(LoggingMixin):
             signal.signal(signal.SIGALRM, self.handle_timeout)
             signal.alarm(self.seconds)
         except ValueError as e:
-            self.logger.warning("timeout can't be used in the current context")
-            self.logger.exception(e)
+            self.log.warning("timeout can't be used in the current context")
+            self.log.exception(e)
 
     def __exit__(self, type, value, traceback):
         try:
             signal.alarm(0)
         except ValueError as e:
-            self.logger.warning("timeout can't be used in the current context")
-            self.logger.exception(e)
+            self.log.warning("timeout can't be used in the current context")
+            self.log.exception(e)

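The timeout context manager above now logs through self.log as well. A minimal usage sketch, assuming a constructor along the lines of timeout(seconds, error_message=...), which the hunks above imply but do not show in full:

    import time

    from airflow.exceptions import AirflowTaskTimeout
    from airflow.utils.timeout import timeout

    try:
        with timeout(1):
            time.sleep(2)  # stands in for a slow task
    except AirflowTaskTimeout:
        pass  # handle_timeout logged 'Process timed out' via self.log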
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/airflow/www/api/experimental/endpoints.py
----------------------------------------------------------------------
diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py
index 4e5892d..b5a3052 100644
--- a/airflow/www/api/experimental/endpoints.py
+++ b/airflow/www/api/experimental/endpoints.py
@@ -18,7 +18,7 @@ from airflow.api.common.experimental import trigger_dag as trigger
 from airflow.api.common.experimental.get_task import get_task
 from airflow.api.common.experimental.get_task_instance import get_task_instance
 from airflow.exceptions import AirflowException
-from airflow.utils.log.LoggingMixin import LoggingMixin
+from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.www.app import csrf
 
 from flask import (
@@ -27,7 +27,7 @@ from flask import (
 )
 from datetime import datetime
 
-_log = LoggingMixin().logger
+_log = LoggingMixin().log
 
 requires_authentication = airflow.api.api_auth.requires_authentication
 

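The module-level pattern in endpoints.py instantiates the mixin directly, so by the log property above the logger is named after the mixin class itself rather than after the importing module:

    from airflow.utils.log.logging_mixin import LoggingMixin

    _log = LoggingMixin().log
    # The name comes from the class, not from the caller's module:
    print(_log.name)  # airflow.utils.log.logging_mixin.LoggingMixin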
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/airflow/www/app.py
----------------------------------------------------------------------
diff --git a/airflow/www/app.py b/airflow/www/app.py
index f280713..438a1e2 100644
--- a/airflow/www/app.py
+++ b/airflow/www/app.py
@@ -113,7 +113,7 @@ def create_app(config=None, testing=False):
 
         def integrate_plugins():
             """Integrate plugins to the context"""
-            log = LoggingMixin().logger
+            log = LoggingMixin().log
             from airflow.plugins_manager import (
                 admin_views, flask_blueprints, menu_links)
             for v in admin_views:

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/scripts/perf/scheduler_ops_metrics.py
----------------------------------------------------------------------
diff --git a/scripts/perf/scheduler_ops_metrics.py b/scripts/perf/scheduler_ops_metrics.py
index 40e1b36..34b5a83 100644
--- a/scripts/perf/scheduler_ops_metrics.py
+++ b/scripts/perf/scheduler_ops_metrics.py
@@ -119,9 +119,9 @@ class SchedulerMetricsJob(SchedulerJob):
                 (datetime.now()-self.start_date).total_seconds() >
                 MAX_RUNTIME_SECS):
             if (len(successful_tis) == num_task_instances):
-                self.logger.info("All tasks processed! Printing stats.")
+                self.log.info("All tasks processed! Printing stats.")
             else:
-                self.logger.info("Test timeout reached. "
+                self.log.info("Test timeout reached. "
                                  "Printing available stats.")
             self.print_stats()
             set_dags_paused_state(True)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/tests/contrib/hooks/test_databricks_hook.py
----------------------------------------------------------------------
diff --git a/tests/contrib/hooks/test_databricks_hook.py b/tests/contrib/hooks/test_databricks_hook.py
index e091067..3931bd3 100644
--- a/tests/contrib/hooks/test_databricks_hook.py
+++ b/tests/contrib/hooks/test_databricks_hook.py
@@ -111,7 +111,7 @@ class DatabricksHookTest(unittest.TestCase):
     @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
     def test_do_api_call_with_error_retry(self, mock_requests):
         for exception in [requests_exceptions.ConnectionError, requests_exceptions.Timeout]:
-            with mock.patch.object(self.hook.logger, 'error') as mock_errors:
+            with mock.patch.object(self.hook.log, 'error') as mock_errors:
                 mock_requests.reset_mock()
                 mock_requests.post.side_effect = exception()
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/tests/contrib/operators/test_dataproc_operator.py
----------------------------------------------------------------------
diff --git a/tests/contrib/operators/test_dataproc_operator.py b/tests/contrib/operators/test_dataproc_operator.py
index 89ad258..7ce6199 100644
--- a/tests/contrib/operators/test_dataproc_operator.py
+++ b/tests/contrib/operators/test_dataproc_operator.py
@@ -132,7 +132,7 @@ class DataprocClusterCreateOperatorTest(unittest.TestCase):
                 zone=ZONE,
                 dag=self.dag
             )
-            with patch.object(dataproc_task.logger, 'info') as mock_info:
+            with patch.object(dataproc_task.log, 'info') as mock_info:
                 with self.assertRaises(TypeError) as _:
                     dataproc_task.execute(None)
                 mock_info.assert_called_with('Creating cluster: %s', CLUSTER_NAME)
@@ -148,7 +148,7 @@ class DataprocClusterCreateOperatorTest(unittest.TestCase):
                 zone=ZONE,
                 dag=self.dag
             )
-            with patch.object(dataproc_task.logger, 'info') as mock_info:
+            with patch.object(dataproc_task.log, 'info') as mock_info:
                 context = { 'ts_nodash' : 'testnodash'}
 
                rendered = dataproc_task.render_template('cluster_name', getattr(dataproc_task,'cluster_name'), context)
@@ -190,7 +190,7 @@ class DataprocClusterDeleteOperatorTest(unittest.TestCase):
                 project_id=PROJECT_ID,
                 dag=self.dag
             )
-            with patch.object(dataproc_task.logger, 'info') as mock_info:
+            with patch.object(dataproc_task.log, 'info') as mock_info:
                 with self.assertRaises(TypeError) as _:
                     dataproc_task.execute(None)
                 mock_info.assert_called_with('Deleting cluster: %s', CLUSTER_NAME)
@@ -205,7 +205,7 @@ class DataprocClusterDeleteOperatorTest(unittest.TestCase):
                 dag=self.dag
             )
 
-            with patch.object(dataproc_task.logger, 'info') as mock_info:
+            with patch.object(dataproc_task.log, 'info') as mock_info:
                 context = { 'ts_nodash' : 'testnodash'}
 
                rendered = dataproc_task.render_template('cluster_name', getattr(dataproc_task,'cluster_name'), context)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/tests/contrib/sensors/test_hdfs_sensors.py
----------------------------------------------------------------------
diff --git a/tests/contrib/sensors/test_hdfs_sensors.py b/tests/contrib/sensors/test_hdfs_sensors.py
index 0e2ed0c..290089b 100644
--- a/tests/contrib/sensors/test_hdfs_sensors.py
+++ b/tests/contrib/sensors/test_hdfs_sensors.py
@@ -26,8 +26,8 @@ class HdfsSensorFolderTests(unittest.TestCase):
             raise unittest.SkipTest('HdfsSensor won\'t work with python3. No need to test anything here')
         from tests.core import FakeHDFSHook
         self.hook = FakeHDFSHook
-        self.logger = logging.getLogger()
-        self.logger.setLevel(logging.DEBUG)
+        self.log = logging.getLogger()
+        self.log.setLevel(logging.DEBUG)
 
     def test_should_be_empty_directory(self):
         """
@@ -35,9 +35,9 @@ class HdfsSensorFolderTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         task = HdfsSensorFolder(task_id='Should_be_empty_directory',
                                 filepath='/datadirectory/empty_directory',
                                 be_empty=True,
@@ -58,9 +58,9 @@ class HdfsSensorFolderTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         task = HdfsSensorFolder(task_id='Should_be_empty_directory_fail',
                                 filepath='/datadirectory/not_empty_directory',
                                 be_empty=True,
@@ -80,9 +80,9 @@ class HdfsSensorFolderTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         task = HdfsSensorFolder(task_id='Should_be_non_empty_directory',
                                 filepath='/datadirectory/not_empty_directory',
                                 timeout=1,
@@ -102,9 +102,9 @@ class HdfsSensorFolderTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         task = HdfsSensorFolder(task_id='Should_be_empty_directory_fail',
                                 filepath='/datadirectory/empty_directory',
                                 timeout=1,
@@ -124,8 +124,8 @@ class HdfsSensorRegexTests(unittest.TestCase):
             raise unittest.SkipTest('HdfsSensor won\'t work with python3. No need to test anything here')
         from tests.core import FakeHDFSHook
         self.hook = FakeHDFSHook
-        self.logger = logging.getLogger()
-        self.logger.setLevel(logging.DEBUG)
+        self.log = logging.getLogger()
+        self.log.setLevel(logging.DEBUG)
 
     def test_should_match_regex(self):
         """
@@ -133,9 +133,9 @@ class HdfsSensorRegexTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         compiled_regex = re.compile("test[1-2]file")
         task = HdfsSensorRegex(task_id='Should_match_the_regex',
                                filepath='/datadirectory/regex_dir',
@@ -157,9 +157,9 @@ class HdfsSensorRegexTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         compiled_regex = re.compile("^IDoNotExist")
         task = HdfsSensorRegex(task_id='Should_not_match_the_regex',
                                filepath='/datadirectory/regex_dir',
@@ -180,9 +180,9 @@ class HdfsSensorRegexTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         compiled_regex = re.compile("test[1-2]file")
         task = HdfsSensorRegex(task_id='Should_match_the_regex_and_filesize',
                                filepath='/datadirectory/regex_dir',
@@ -207,9 +207,9 @@ class HdfsSensorRegexTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         compiled_regex = re.compile("test[1-2]file")
         task = HdfsSensorRegex(task_id='Should_match_the_regex_but_filesize',
                                filepath='/datadirectory/regex_dir',
@@ -231,9 +231,9 @@ class HdfsSensorRegexTests(unittest.TestCase):
         :return:
         """
         # Given
-        self.logger.debug('#' * 10)
-        self.logger.debug('Running %s', self._testMethodName)
-        self.logger.debug('#' * 10)
+        self.log.debug('#' * 10)
+        self.log.debug('Running %s', self._testMethodName)
+        self.log.debug('#' * 10)
         compiled_regex = re.compile("copying_file_\d+.txt")
         task = HdfsSensorRegex(task_id='Should_match_the_regex_but_filesize',
                                filepath='/datadirectory/regex_dir',

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/tests/executors/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/executors/test_executor.py b/tests/executors/test_executor.py
index 9ec6cd4..a0e227c 100644
--- a/tests/executors/test_executor.py
+++ b/tests/executors/test_executor.py
@@ -29,8 +29,8 @@ class TestExecutor(BaseExecutor):
         super(TestExecutor, self).__init__(*args, **kwargs)
 
     def execute_async(self, key, command, queue=None):
-        self.logger.debug("{} running task instances".format(len(self.running)))
-        self.logger.debug("{} in queue".format(len(self.queued_tasks)))
+        self.log.debug("{} running task instances".format(len(self.running)))
+        self.log.debug("{} in queue".format(len(self.queued_tasks)))
 
     def heartbeat(self):
         session = settings.Session()

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/tests/operators/sensors.py
----------------------------------------------------------------------
diff --git a/tests/operators/sensors.py b/tests/operators/sensors.py
index 9b256e6..ee67524 100644
--- a/tests/operators/sensors.py
+++ b/tests/operators/sensors.py
@@ -75,7 +75,7 @@ class TimeoutTestSensor(BaseSensorOperator):
                 else:
                     raise AirflowSensorTimeout('Snap. Time is OUT.')
             time.sleep(self.poke_interval)
-        self.logger.info("Success criteria met. Exiting.")
+        self.log.info("Success criteria met. Exiting.")
 
 
 class SensorTimeoutTest(unittest.TestCase):
@@ -187,7 +187,7 @@ class HttpSensorTests(unittest.TestCase):
             poke_interval=1
         )
 
-        with mock.patch.object(task.hook.logger, 'error') as mock_errors:
+        with mock.patch.object(task.hook.log, 'error') as mock_errors:
             with self.assertRaises(AirflowSensorTimeout):
                 task.execute(None)
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/tests/test_utils/reset_warning_registry.py
----------------------------------------------------------------------
diff --git a/tests/test_utils/reset_warning_registry.py b/tests/test_utils/reset_warning_registry.py
new file mode 100644
index 0000000..a275a6d
--- /dev/null
+++ b/tests/test_utils/reset_warning_registry.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import sys
+
+
+# We need to explicitly clear each module's __warningregistry__.
+# https://docs.python.org/2/library/warnings.html
+# One thing to be aware of is that if a warning has already been raised because
+# of a once/default rule, then no matter what filters are set the warning will
+# not be seen again unless the warnings registry related to the warning has
+# been cleared.
+#
+# Proposed fix from Stack Overflow, which refers to the Python bug tracker
+# noqa
+# https://stackoverflow.com/questions/19428761/python-showing-once-warnings-again-resetting-all-warning-registries
+class reset_warning_registry(object):
+    """
+    Context manager that archives and clears the warning registry for the
+    duration of the context.
+
+    :param pattern:
+          optional regex pattern; causes the manager to only reset modules
+          whose names match this pattern. Defaults to ``".*"``.
+    """
+
+    #: regexp for filtering which modules are reset
+    _pattern = None
+
+    #: dict mapping module name -> old registry contents
+    _backup = None
+
+    def __init__(self, pattern=None):
+        self._pattern = re.compile(pattern or ".*")
+
+    def __enter__(self):
+        # archive and clear the __warningregistry__ key for all modules
+        # that match the 'reset' pattern.
+        pattern = self._pattern
+        backup = self._backup = {}
+        for name, mod in list(sys.modules.items()):
+            if pattern.match(name):
+                reg = getattr(mod, "__warningregistry__", None)
+                if reg:
+                    backup[name] = reg.copy()
+                    reg.clear()
+        return self
+
+    def __exit__(self, *exc_info):
+        # restore warning registry from backup
+        modules = sys.modules
+        backup = self._backup
+        for name, content in backup.items():
+            mod = modules.get(name)
+            if mod is None:
+                continue
+            reg = getattr(mod, "__warningregistry__", None)
+            if reg is None:
+                setattr(mod, "__warningregistry__", content)
+            else:
+                reg.clear()
+                reg.update(content)
+
+        # clear all registry entries that we didn't archive
+        pattern = self._pattern
+        for name, mod in list(modules.items()):
+            if pattern.match(name) and name not in backup:
+                reg = getattr(mod, "__warningregistry__", None)
+                if reg:
+                    reg.clear()

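A minimal sketch of the new helper in use, mirroring the test added further below: inside the context, each matching module's __warningregistry__ is archived and cleared so a once-only warning can be observed again; on exit the archived registries are restored:

    import warnings

    from tests.test_utils.reset_warning_registry import reset_warning_registry

    with reset_warning_registry():
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter('always')
            warnings.warn('demo', DeprecationWarning)
    assert len(caught) == 1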
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/tests/utils/log/test_logging.py
----------------------------------------------------------------------
diff --git a/tests/utils/log/test_logging.py b/tests/utils/log/test_logging.py
index 7e05c7d..8df6dfc 100644
--- a/tests/utils/log/test_logging.py
+++ b/tests/utils/log/test_logging.py
@@ -41,7 +41,7 @@ class TestS3TaskHandler(unittest.TestCase):
     def test_init_raises(self):
         self.hook_mock.side_effect = Exception('Failed to connect')
         handler = S3TaskHandler()
-        with mock.patch.object(handler.logger, 'error') as mock_error:
+        with mock.patch.object(handler.log, 'error') as mock_error:
             # Initialize the hook
             handler.hook()
             mock_error.assert_called_once_with(
@@ -81,7 +81,7 @@ class TestS3TaskHandler(unittest.TestCase):
     def test_read_raises_return_error(self):
         self.hook_inst_mock.get_key.side_effect = Exception('error')
         handler = S3TaskHandler()
-        with mock.patch.object(handler.logger, 'error') as mock_error:
+        with mock.patch.object(handler.log, 'error') as mock_error:
             result = handler.s3_log_read(
                 self.remote_log_location,
                 return_error=True
@@ -102,7 +102,7 @@ class TestS3TaskHandler(unittest.TestCase):
     def test_write_raises(self):
         self.hook_inst_mock.load_string.side_effect = Exception('error')
         handler = S3TaskHandler()
-        with mock.patch.object(handler.logger, 'error') as mock_error:
+        with mock.patch.object(handler.log, 'error') as mock_error:
             handler.write('text', self.remote_log_location)
             msg = 'Could not write logs to %s' % self.remote_log_location
             mock_error.assert_called_once_with(msg)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/af405084/tests/utils/test_logging_mixin.py
----------------------------------------------------------------------
diff --git a/tests/utils/test_logging_mixin.py b/tests/utils/test_logging_mixin.py
new file mode 100644
index 0000000..bf9e225
--- /dev/null
+++ b/tests/utils/test_logging_mixin.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import warnings
+
+from airflow.operators.bash_operator import BashOperator
+from tests.test_utils.reset_warning_registry import reset_warning_registry
+
+
+class TestLoggingMixin(unittest.TestCase):
+    def setUp(self):
+        warnings.filterwarnings(
+            action='always'
+        )
+
+    def test_log(self):
+        op = BashOperator(
+            task_id='task-1',
+            bash_command='exit 0'
+        )
+        with reset_warning_registry():
+            with warnings.catch_warnings(record=True) as w:
+                # The filter is set to 'always' in setUp, because the warning
+                # may already have been raised once. Trigger the warning:
+                op.logger.info('Some arbitrary line')
+
+                self.assertEqual(len(w), 1)
+
+                warning = w[0]
+                self.assertTrue(issubclass(warning.category, DeprecationWarning))
+                self.assertEqual(
+                    'Initializing logger for airflow.operators.bash_operator.BashOperator'
+                    ' using logger(), which will be replaced by .log in Airflow 2.0',
+                    str(warning.message)
+                )
+
+    def tearDown(self):
+        warnings.resetwarnings()

