airflow-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Commented] (AIRFLOW-3239) Test discovery partial fails due to incorrect name of the test files
Date Mon, 05 Nov 2018 15:53:00 GMT

    [ https://issues.apache.org/jira/browse/AIRFLOW-3239?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16675345#comment-16675345
] 

ASF GitHub Bot commented on AIRFLOW-3239:
-----------------------------------------

Fokko closed pull request #4131: [AIRFLOW-3239] Further fix of CI tests
URL: https://github.com/apache/incubator-airflow/pull/4131
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/tests/__init__.py b/tests/__init__.py
index eff9d4b9f3..6cc4996f5e 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -22,15 +22,5 @@
 from __future__ import absolute_import
 
 from .api import *
-from .configuration import *
-from .contrib import *
 from .core import *
-from .executors import *
-from .jobs import *
-from .impersonation import *
-from .lineage import *
 from .models import *
-from .operators import *
-from .security import *
-from .task import *
-from .utils import *
diff --git a/tests/operators/bash_operator.py b/tests/operators/test_bash_operator.py
similarity index 95%
rename from tests/operators/bash_operator.py
rename to tests/operators/test_bash_operator.py
index e0a0ff3ebb..8f55b9cda1 100644
--- a/tests/operators/bash_operator.py
+++ b/tests/operators/test_bash_operator.py
@@ -65,6 +65,9 @@ def test_echo_env_variables(self):
                              'echo $AIRFLOW_CTX_EXECUTION_DATE>> {0};'
                              'echo $AIRFLOW_CTX_DAG_RUN_ID>> {0};'.format(fname)
             )
+
+            original_AIRFLOW_HOME = os.environ['AIRFLOW_HOME']
+
             os.environ['AIRFLOW_HOME'] = 'MY_PATH_TO_AIRFLOW_HOME'
             t.run(DEFAULT_DATE, DEFAULT_DATE,
                   ignore_first_depends_on_past=True, ignore_ti_state=True)
@@ -78,3 +81,5 @@ def test_echo_env_variables(self):
                 self.assertIn('echo_env_vars', output)
                 self.assertIn(DEFAULT_DATE.isoformat(), output)
                 self.assertIn('manual__' + DEFAULT_DATE.isoformat(), output)
+
+            os.environ['AIRFLOW_HOME'] = original_AIRFLOW_HOME
diff --git a/tests/operators/operators.py b/tests/operators/test_operators.py
similarity index 86%
rename from tests/operators/operators.py
rename to tests/operators/test_operators.py
index a2d2e4ccff..39a813316e 100644
--- a/tests/operators/operators.py
+++ b/tests/operators/test_operators.py
@@ -53,6 +53,8 @@ def tearDown(self):
             for table in drop_tables:
                 conn.execute("DROP TABLE IF EXISTS {}".format(table))
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_operator_test(self):
         sql = """
         CREATE TABLE IF NOT EXISTS test_airflow (
@@ -66,8 +68,11 @@ def test_mysql_operator_test(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_operator_test_multi(self):
         sql = [
+            "CREATE TABLE IF NOT EXISTS test_airflow (dummy VARCHAR(50))",
             "TRUNCATE TABLE test_airflow",
             "INSERT INTO test_airflow VALUES ('X')",
         ]
@@ -79,6 +84,8 @@ def test_mysql_operator_test_multi(self):
         )
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_hook_test_bulk_load(self):
         records = ("foo", "bar", "baz")
 
@@ -101,6 +108,8 @@ def test_mysql_hook_test_bulk_load(self):
                 results = tuple(result[0] for result in c.fetchall())
                 self.assertEqual(sorted(results), sorted(records))
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_hook_test_bulk_dump(self):
         from airflow.hooks.mysql_hook import MySqlHook
         hook = MySqlHook('airflow_db')
@@ -112,6 +121,8 @@ def test_mysql_hook_test_bulk_dump(self):
             self.skipTest("Skip test_mysql_hook_test_bulk_load "
                           "since file output is not permitted")
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     @mock.patch('airflow.hooks.mysql_hook.MySqlHook.get_conn')
     def test_mysql_hook_test_bulk_dump_mock(self, mock_get_conn):
         mock_execute = mock.MagicMock()
@@ -131,6 +142,8 @@ def test_mysql_hook_test_bulk_dump_mock(self, mock_get_conn):
         """.format(tmp_file=tmp_file, table=table)
         assertEqualIgnoreMultipleSpaces(self, mock_execute.call_args[0][0], query)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_mysql(self):
         sql = "SELECT * FROM INFORMATION_SCHEMA.TABLES LIMIT 100;"
         from airflow.operators.generic_transfer import GenericTransfer
@@ -148,6 +161,8 @@ def test_mysql_to_mysql(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_overwrite_schema(self):
         """
         Verifies option to overwrite connection schema
@@ -177,6 +192,16 @@ def setUp(self):
         dag = DAG(TEST_DAG_ID, default_args=args)
         self.dag = dag
 
+    def tearDown(self):
+        tables_to_drop = ['test_postgres_to_postgres', 'test_airflow']
+        from airflow.hooks.postgres_hook import PostgresHook
+        with PostgresHook().get_conn() as conn:
+            with conn.cursor() as cur:
+                for t in tables_to_drop:
+                    cur.execute("DROP TABLE IF EXISTS {}".format(t))
+
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_postgres_operator_test(self):
         sql = """
         CREATE TABLE IF NOT EXISTS test_airflow (
@@ -197,8 +222,11 @@ def test_postgres_operator_test(self):
             end_date=DEFAULT_DATE,
             ignore_ti_state=True)
 
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_postgres_operator_test_multi(self):
         sql = [
+            "CREATE TABLE IF NOT EXISTS test_airflow (dummy VARCHAR(50))",
             "TRUNCATE TABLE test_airflow",
             "INSERT INTO test_airflow VALUES ('X')",
         ]
@@ -207,6 +235,8 @@ def test_postgres_operator_test_multi(self):
             task_id='postgres_operator_test_multi', sql=sql, dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_postgres_to_postgres(self):
         sql = "SELECT * FROM INFORMATION_SCHEMA.TABLES LIMIT 100;"
         from airflow.operators.generic_transfer import GenericTransfer
@@ -224,6 +254,8 @@ def test_postgres_to_postgres(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_vacuum(self):
         """
         Verifies the VACUUM operation runs well with the PostgresOperator
@@ -238,6 +270,8 @@ def test_vacuum(self):
             autocommit=True)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_overwrite_schema(self):
         """
         Verifies option to overwrite connection schema
@@ -343,11 +377,15 @@ def tearDown(self):
         with MySqlHook().get_conn() as cur:
             cur.execute("DROP TABLE IF EXISTS baby_names CASCADE;")
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_clear(self):
         self.dag.clear(
             start_date=DEFAULT_DATE,
             end_date=timezone.utcnow())
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_hive(self):
         from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
         sql = "SELECT * FROM baby_names LIMIT 1000;"
@@ -361,6 +399,8 @@ def test_mysql_to_hive(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_hive_partition(self):
         from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
         sql = "SELECT * FROM baby_names LIMIT 1000;"
@@ -376,6 +416,8 @@ def test_mysql_to_hive_partition(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_hive_tblproperties(self):
         from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
         sql = "SELECT * FROM baby_names LIMIT 1000;"
@@ -390,6 +432,8 @@ def test_mysql_to_hive_tblproperties(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     @mock.patch('airflow.hooks.hive_hooks.HiveCliHook.load_file')
     def test_mysql_to_hive_type_conversion(self, mock_load_file):
         mysql_table = 'test_mysql_to_hive'
@@ -433,6 +477,8 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file):
             with m.get_conn() as c:
                 c.execute("DROP TABLE IF EXISTS {}".format(mysql_table))
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_hive_verify_loaded_values(self):
         mysql_table = 'test_mysql_to_hive'
         hive_table = 'test_mysql_to_hive'
diff --git a/tests/configuration.py b/tests/test_configuration.py
similarity index 100%
rename from tests/configuration.py
rename to tests/test_configuration.py
diff --git a/tests/impersonation.py b/tests/test_impersonation.py
similarity index 100%
rename from tests/impersonation.py
rename to tests/test_impersonation.py
diff --git a/tests/jobs.py b/tests/test_jobs.py
similarity index 100%
rename from tests/jobs.py
rename to tests/test_jobs.py
diff --git a/tests/utils.py b/tests/test_utils.py
similarity index 100%
rename from tests/utils.py
rename to tests/test_utils.py


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


> Test discovery partial fails due to incorrect name of the test files
> --------------------------------------------------------------------
>
>                 Key: AIRFLOW-3239
>                 URL: https://issues.apache.org/jira/browse/AIRFLOW-3239
>             Project: Apache Airflow
>          Issue Type: Bug
>          Components: tests
>            Reporter: Xiaodong DENG
>            Assignee: Xiaodong DENG
>            Priority: Major
>             Fix For: 2.0.0
>
>
> In PR [https://github.com/apache/incubator-airflow/pull/4049], I have fixed the incorrect
name of some test files (resulting in partial failure in test discovery).
> There are some other scripts with this issue.



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Mime
View raw message