superset-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From maximebeauche...@apache.org
Subject [incubator-superset] branch master updated: 7620: Start removing dependencies on requests (#7643)
Date Fri, 02 Aug 2019 17:01:41 GMT
This is an automated email from the ASF dual-hosted git repository.

maximebeauchemin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new e23920b  7620: Start removing dependencies on requests (#7643)
e23920b is described below

commit e23920b8bab05232fac8b905bd28dfcec2f6b981
Author: Gianluca Ciccarelli <galiziacentrale@gmail.com>
AuthorDate: Fri Aug 2 20:01:28 2019 +0300

    7620: Start removing dependencies on requests (#7643)
    
    * 7620: Start removing dependencies on requests
    
    * Patch urllib.request.urlopen instead of requests.get
    
    * Try to fix flake8
    
    * More work on flake8 import errors
    
    * First attempt at using urllib with cookies
    
    * Fix pylint/flake8
    
    * Fix test_deliver_slice_csv_attachment
    
    * Fix test_deliver_slice_csv_inline
    
    * Import requests and pydruid conditionally, remove dependency on prison
    
    * Fix flake errors
    
    * Fix load_examples
    
    * Please flake
    
    * Skip tests depending on optional deps
    
    * Try to please flake
    
    * Address review comments
    
    * Remove Druid-related UI
    
    * Revert "Remove Druid-related UI"
    
    This reverts commit d7e0f166cc3f3dd2496b4a666e177f0c191aeb0f.
    
    * Skip a few tests more
    
    * Put imports in right order
    
    * Apply black patch
    
    * Please flake
    
    * Please black, silence flake
    
    * Use flake8 silencing the right way
    
    * Add deps for CI
---
 requirements-dev.txt                |  2 +
 requirements.txt                    |  2 -
 setup.py                            |  3 +-
 superset/connectors/druid/models.py | 79 +++++++++++++++++-------------
 superset/examples/helpers.py        |  5 +-
 superset/tasks/cache.py             |  8 ++--
 superset/tasks/schedules.py         | 12 +++--
 superset/utils/core.py              | 40 ++++++++++------
 tests/druid_func_tests.py           | 96 +++++++++++++++++++++++++++++++++++--
 tests/druid_tests.py                | 47 +++++++++++++++---
 tests/model_tests.py                |  3 ++
 tests/schedules_test.py             | 22 +++++----
 tests/security_tests.py             | 10 ++++
 13 files changed, 248 insertions(+), 81 deletions(-)

diff --git a/requirements-dev.txt b/requirements-dev.txt
index 67a13e7..5c425cc 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -27,8 +27,10 @@ pip-tools==3.7.0
 pre-commit==1.17.0
 psycopg2-binary==2.7.5
 pycodestyle==2.5.0
+pydruid==0.5.6
 pyhive==0.6.1
 pylint==1.9.2
 redis==3.2.1
+requests==2.22.0
 statsd==3.3.0
 tox==3.11.1
diff --git a/requirements.txt b/requirements.txt
index 54265b1..38d4922 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -60,7 +60,6 @@ polyline==1.4.0
 prison==0.1.2             # via flask-appbuilder
 py==1.8.0                 # via retry
 pycparser==2.19           # via cffi
-pydruid==0.5.6
 pyjwt==1.7.1              # via flask-appbuilder, flask-jwt-extended
 pyrsistent==0.15.4        # via jsonschema
 python-dateutil==2.8.0
@@ -70,7 +69,6 @@ python-geohash==0.8.5
 python3-openid==3.1.0     # via flask-openid
 pytz==2019.2              # via babel, celery, pandas
 pyyaml==5.1.2
-requests==2.22.0
 retry==0.9.2
 selenium==3.141.0
 simplejson==3.16.0
diff --git a/setup.py b/setup.py
index e63eaeb..11712c9 100644
--- a/setup.py
+++ b/setup.py
@@ -89,12 +89,10 @@ setup(
         "parsedatetime",
         "pathlib2",
         "polyline",
-        "pydruid>=0.5.2",
         "python-dateutil",
         "python-dotenv",
         "python-geohash",
         "pyyaml>=5.1",
-        "requests>=2.22.0",
         "retry>=0.9.2",
         "selenium>=3.141.0",
         "simplejson>=3.15.0",
@@ -111,6 +109,7 @@ setup(
         "mysql": ["mysqlclient==1.4.2.post1"],
         "postgres": ["psycopg2-binary==2.7.5"],
         "presto": ["pyhive[presto]>=0.4.0"],
+        "druid": ["pydruid==0.5.2", "requests==2.22.0"],
     },
     author="Apache Software Foundation",
     author_email="dev@superset.incubator.apache.org",
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index d7b00c3..9f37237 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -16,6 +16,7 @@
 # under the License.
 # pylint: disable=C,R,W
 # pylint: disable=invalid-unary-operand-type
+# flake8: noqa I202
 from collections import OrderedDict
 from copy import deepcopy
 from datetime import datetime, timedelta
@@ -31,20 +32,24 @@ from flask_appbuilder import Model
 from flask_appbuilder.models.decorators import renders
 from flask_babel import lazy_gettext as _
 import pandas
-from pydruid.client import PyDruid
-from pydruid.utils.aggregators import count
-from pydruid.utils.dimensions import MapLookupExtraction, RegexExtraction
-from pydruid.utils.filters import Dimension, Filter
-from pydruid.utils.having import Aggregation
-from pydruid.utils.postaggregator import (
-    Const,
-    Field,
-    HyperUniqueCardinality,
-    Postaggregator,
-    Quantile,
-    Quantiles,
-)
-import requests
+
+try:
+    from pydruid.client import PyDruid
+    from pydruid.utils.aggregators import count
+    from pydruid.utils.dimensions import MapLookupExtraction, RegexExtraction
+    from pydruid.utils.filters import Dimension, Filter
+    from pydruid.utils.having import Aggregation
+    from pydruid.utils.postaggregator import (
+        Const,
+        Field,
+        HyperUniqueCardinality,
+        Postaggregator,
+        Quantile,
+        Quantiles,
+    )
+    import requests
+except ImportError:
+    pass
 import sqlalchemy as sa
 from sqlalchemy import (
     Boolean,
@@ -65,36 +70,44 @@ from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetr
 from superset.exceptions import MetricPermException, SupersetException
 from superset.models.helpers import AuditMixinNullable, ImportMixin, QueryResult
 from superset.utils import core as utils, import_datasource
-from superset.utils.core import DimSelector, DTTM_ALIAS, flasher
 
+try:
+    from superset.utils.core import DimSelector, DTTM_ALIAS, flasher
+except ImportError:
+    pass
 DRUID_TZ = conf.get("DRUID_TZ")
 POST_AGG_TYPE = "postagg"
 metadata = Model.metadata  # pylint: disable=no-member
 
 
-# Function wrapper because bound methods cannot
-# be passed to processes
-def _fetch_metadata_for(datasource):
-    return datasource.latest_metadata()
+try:
+    # Postaggregator might not have been imported.
+    class JavascriptPostAggregator(Postaggregator):
+        def __init__(self, name, field_names, function):
+            self.post_aggregator = {
+                "type": "javascript",
+                "fieldNames": field_names,
+                "name": name,
+                "function": function,
+            }
+            self.name = name
 
+    class CustomPostAggregator(Postaggregator):
+        """A way to allow users to specify completely custom PostAggregators"""
 
-class JavascriptPostAggregator(Postaggregator):
-    def __init__(self, name, field_names, function):
-        self.post_aggregator = {
-            "type": "javascript",
-            "fieldNames": field_names,
-            "name": name,
-            "function": function,
-        }
-        self.name = name
+        def __init__(self, name, post_aggregator):
+            self.name = name
+            self.post_aggregator = post_aggregator
 
 
-class CustomPostAggregator(Postaggregator):
-    """A way to allow users to specify completely custom PostAggregators"""
+except NameError:
+    pass
 
-    def __init__(self, name, post_aggregator):
-        self.name = name
-        self.post_aggregator = post_aggregator
+
+# Function wrapper because bound methods cannot
+# be passed to processes
+def _fetch_metadata_for(datasource):
+    return datasource.latest_metadata()
 
 
 class DruidCluster(Model, AuditMixinNullable, ImportMixin):
diff --git a/superset/examples/helpers.py b/superset/examples/helpers.py
index cff7da6..28349eb 100644
--- a/superset/examples/helpers.py
+++ b/superset/examples/helpers.py
@@ -19,10 +19,9 @@
 from io import BytesIO
 import json
 import os
+from urllib import request
 import zlib
 
-import requests
-
 from superset import app, db
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.models import core as models
@@ -70,7 +69,7 @@ def get_slice_json(defaults, **kwargs):
 
 
 def get_example_data(filepath, is_gzip=True, make_bytes=False):
-    content = requests.get(f"{BASE_URL}{filepath}?raw=true").content
+    content = request.urlopen(f"{BASE_URL}{filepath}?raw=true").read()
     if is_gzip:
         content = zlib.decompress(content, zlib.MAX_WBITS | 16)
     if make_bytes:
diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py
index 73dc756..136de81 100644
--- a/superset/tasks/cache.py
+++ b/superset/tasks/cache.py
@@ -18,10 +18,10 @@
 
 import json
 import logging
+from urllib import request
+from urllib.error import URLError
 
 from celery.utils.log import get_task_logger
-import requests
-from requests.exceptions import RequestException
 from sqlalchemy import and_, func
 
 from superset import app, db
@@ -282,9 +282,9 @@ def cache_warmup(strategy_name, *args, **kwargs):
     for url in strategy.get_urls():
         try:
             logger.info(f"Fetching {url}")
-            requests.get(url)
+            request.urlopen(url)
             results["success"].append(url)
-        except RequestException:
+        except URLError:
             logger.exception("Error warming up cache!")
             results["errors"].append(url)
 
diff --git a/superset/tasks/schedules.py b/superset/tasks/schedules.py
index ac12efa..e8c7d67 100644
--- a/superset/tasks/schedules.py
+++ b/superset/tasks/schedules.py
@@ -23,19 +23,18 @@ from datetime import datetime, timedelta
 from email.utils import make_msgid, parseaddr
 import logging
 import time
-
+from urllib.error import URLError
+import urllib.request
 
 import croniter
 from dateutil.tz import tzlocal
 from flask import render_template, Response, session, url_for
 from flask_babel import gettext as __
 from flask_login import login_user
-import requests
 from retry.api import retry_call
 from selenium.common.exceptions import WebDriverException
 from selenium.webdriver import chrome, firefox
 import simplejson as json
-from six.moves import urllib
 from werkzeug.utils import parse_cookie
 
 # Superset framework imports
@@ -258,8 +257,11 @@ def _get_slice_data(schedule):
     for cookie in _get_auth_cookies():
         cookies["session"] = cookie
 
-    response = requests.get(slice_url, cookies=cookies)
-    response.raise_for_status()
+    opener = urllib.request.build_opener()
+    opener.addheaders.append(("Cookie", f"session={cookies['session']}"))
+    response = opener.open(slice_url)
+    if response.getcode() != 200:
+        raise URLError(response.getcode())
 
     # TODO: Move to the csv module
     rows = [r.split(b",") for r in response.content.splitlines()]
diff --git a/superset/utils/core.py b/superset/utils/core.py
index b7d3370..eb43e3a 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 # pylint: disable=C,R,W
+# flake8: noqa I202
 """Utility functions used across Superset"""
 from datetime import date, datetime, time, timedelta
 import decimal
@@ -51,7 +52,11 @@ import markdown as md
 import numpy
 import pandas as pd
 import parsedatetime
-from pydruid.utils.having import Having
+
+try:
+    from pydruid.utils.having import Having
+except ImportError:
+    pass
 import sqlalchemy as sa
 from sqlalchemy import event, exc, select, Text
 from sqlalchemy.dialects.mysql import MEDIUMTEXT
@@ -72,6 +77,25 @@ JS_MAX_INTEGER = 9007199254740991  # Largest int Java Script can handle 2^53-1
 
 sources = {"chart": 0, "dashboard": 1, "sql_lab": 2}
 
+try:
+    # Having might not have been imported.
+    class DimSelector(Having):
+        def __init__(self, **args):
+            # Just a hack to prevent any exceptions
+            Having.__init__(self, type="equalTo", aggregation=None, value=None)
+
+            self.having = {
+                "having": {
+                    "type": "dimSelector",
+                    "dimension": args["dimension"],
+                    "value": args["value"],
+                }
+            }
+
+
+except NameError:
+    pass
+
 
 def flasher(msg, severity=None):
     """Flask's flash if available, logging call if not"""
@@ -179,20 +203,6 @@ def string_to_num(s: str):
         return None
 
 
-class DimSelector(Having):
-    def __init__(self, **args):
-        # Just a hack to prevent any exceptions
-        Having.__init__(self, type="equalTo", aggregation=None, value=None)
-
-        self.having = {
-            "having": {
-                "type": "dimSelector",
-                "dimension": args["dimension"],
-                "value": args["value"],
-            }
-        }
-
-
 def list_minus(l: List, minus: List) -> List:
     """Returns l without what is in minus
 
diff --git a/tests/druid_func_tests.py b/tests/druid_func_tests.py
index 3c954f9..c4d6ab9 100644
--- a/tests/druid_func_tests.py
+++ b/tests/druid_func_tests.py
@@ -18,13 +18,16 @@ import json
 import unittest
 from unittest.mock import Mock
 
-from pydruid.utils.dimensions import MapLookupExtraction, RegexExtraction
-import pydruid.utils.postaggregator as postaggs
-
+try:
+    from pydruid.utils.dimensions import MapLookupExtraction, RegexExtraction
+    import pydruid.utils.postaggregator as postaggs
+except ImportError:
+    pass
 
 import superset.connectors.druid.models as models
 from superset.connectors.druid.models import DruidColumn, DruidDatasource, DruidMetric
 from superset.exceptions import SupersetException
+from .base_tests import SupersetTestCase
 
 
 def mock_metric(metric_name, is_postagg=False):
@@ -40,6 +43,9 @@ def emplace(metrics_dict, metric_name, is_postagg=False):
 
 # Unit tests that can be run without initializing base tests
 class DruidFuncTestCase(unittest.TestCase):
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_extraction_fn_map(self):
         filters = [{"col": "deviceName", "val": ["iPhone X"], "op": "in"}]
         dimension_spec = {
@@ -83,6 +89,9 @@ class DruidFuncTestCase(unittest.TestCase):
             dim_ext_fn["retainMissingValue"], f_ext_fn._retain_missing_values
         )
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_extraction_fn_regex(self):
         filters = [{"col": "buildPrefix", "val": ["22B"], "op": "in"}]
         dimension_spec = {
@@ -101,6 +110,9 @@ class DruidFuncTestCase(unittest.TestCase):
         f_ext_fn = f.extraction_function
         self.assertEqual(dim_ext_fn["expr"], f_ext_fn._expr)
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_ignores_invalid_filter_objects(self):
         filtr = {"col": "col1", "op": "=="}
         filters = [filtr]
@@ -108,6 +120,9 @@ class DruidFuncTestCase(unittest.TestCase):
         column_dict = {"col1": col}
         self.assertIsNone(DruidDatasource.get_filters(filters, [], column_dict))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_constructs_filter_in(self):
         filtr = {"col": "A", "op": "in", "val": ["a", "b", "c"]}
         col = DruidColumn(column_name="A")
@@ -118,6 +133,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual("or", res.filter["filter"]["type"])
         self.assertEqual(3, len(res.filter["filter"]["fields"]))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_constructs_filter_not_in(self):
         filtr = {"col": "A", "op": "not in", "val": ["a", "b", "c"]}
         col = DruidColumn(column_name="A")
@@ -131,6 +149,9 @@ class DruidFuncTestCase(unittest.TestCase):
             3, len(res.filter["filter"]["field"].filter["filter"]["fields"])
         )
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_constructs_filter_equals(self):
         filtr = {"col": "A", "op": "==", "val": "h"}
         col = DruidColumn(column_name="A")
@@ -140,6 +161,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual("A", res.filter["filter"]["dimension"])
         self.assertEqual("h", res.filter["filter"]["value"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_constructs_filter_not_equals(self):
         filtr = {"col": "A", "op": "!=", "val": "h"}
         col = DruidColumn(column_name="A")
@@ -148,6 +172,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual("not", res.filter["filter"]["type"])
         self.assertEqual("h", res.filter["filter"]["field"].filter["filter"]["value"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_constructs_bounds_filter(self):
         filtr = {"col": "A", "op": ">=", "val": "h"}
         col = DruidColumn(column_name="A")
@@ -168,6 +195,9 @@ class DruidFuncTestCase(unittest.TestCase):
         res = DruidDatasource.get_filters([filtr], [], column_dict)
         self.assertTrue(res.filter["filter"]["upperStrict"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_constructs_regex_filter(self):
         filtr = {"col": "A", "op": "regex", "val": "[abc]"}
         col = DruidColumn(column_name="A")
@@ -177,6 +207,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual("[abc]", res.filter["filter"]["pattern"])
         self.assertEqual("A", res.filter["filter"]["dimension"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_composes_multiple_filters(self):
         filtr1 = {"col": "A", "op": "!=", "val": "y"}
         filtr2 = {"col": "B", "op": "in", "val": ["a", "b", "c"]}
@@ -187,6 +220,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual("and", res.filter["filter"]["type"])
         self.assertEqual(2, len(res.filter["filter"]["fields"]))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_ignores_in_not_in_with_empty_value(self):
         filtr1 = {"col": "A", "op": "in", "val": []}
         filtr2 = {"col": "A", "op": "not in", "val": []}
@@ -195,6 +231,9 @@ class DruidFuncTestCase(unittest.TestCase):
         res = DruidDatasource.get_filters([filtr1, filtr2], [], column_dict)
         self.assertIsNone(res)
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_constructs_equals_for_in_not_in_single_value(self):
         filtr = {"col": "A", "op": "in", "val": ["a"]}
         cola = DruidColumn(column_name="A")
@@ -203,6 +242,9 @@ class DruidFuncTestCase(unittest.TestCase):
         res = DruidDatasource.get_filters([filtr], [], column_dict)
         self.assertEqual("selector", res.filter["filter"]["type"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_handles_arrays_for_string_types(self):
         filtr = {"col": "A", "op": "==", "val": ["a", "b"]}
         col = DruidColumn(column_name="A")
@@ -214,6 +256,9 @@ class DruidFuncTestCase(unittest.TestCase):
         res = DruidDatasource.get_filters([filtr], [], column_dict)
         self.assertIsNone(res.filter["filter"]["value"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_handles_none_for_string_types(self):
         filtr = {"col": "A", "op": "==", "val": None}
         col = DruidColumn(column_name="A")
@@ -221,6 +266,9 @@ class DruidFuncTestCase(unittest.TestCase):
         res = DruidDatasource.get_filters([filtr], [], column_dict)
         self.assertIsNone(res)
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_extracts_values_in_quotes(self):
         filtr = {"col": "A", "op": "in", "val": ['"a"']}
         col = DruidColumn(column_name="A")
@@ -228,6 +276,9 @@ class DruidFuncTestCase(unittest.TestCase):
         res = DruidDatasource.get_filters([filtr], [], column_dict)
         self.assertEqual("a", res.filter["filter"]["value"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_keeps_trailing_spaces(self):
         filtr = {"col": "A", "op": "in", "val": ["a "]}
         col = DruidColumn(column_name="A")
@@ -235,6 +286,9 @@ class DruidFuncTestCase(unittest.TestCase):
         res = DruidDatasource.get_filters([filtr], [], column_dict)
         self.assertEqual("a ", res.filter["filter"]["value"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_filters_converts_strings_to_num(self):
         filtr = {"col": "A", "op": "in", "val": ["6"]}
         col = DruidColumn(column_name="A")
@@ -245,6 +299,9 @@ class DruidFuncTestCase(unittest.TestCase):
         res = DruidDatasource.get_filters([filtr], ["A"], column_dict)
         self.assertEqual(6, res.filter["filter"]["value"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_run_query_no_groupby(self):
         client = Mock()
         from_dttm = Mock()
@@ -291,6 +348,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertIn("post_aggregations", called_args)
         # restore functions
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_run_query_with_adhoc_metric(self):
         client = Mock()
         from_dttm = Mock()
@@ -345,6 +405,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertIn("post_aggregations", called_args)
         # restore functions
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_run_query_single_groupby(self):
         client = Mock()
         from_dttm = Mock()
@@ -440,6 +503,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual("matcho", client.topn.call_args_list[0][1]["dimension"])
         self.assertEqual(spec, client.topn.call_args_list[1][1]["dimension"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_run_query_multiple_groupby(self):
         client = Mock()
         from_dttm = Mock()
@@ -485,6 +551,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertIn("dimensions", called_args)
         self.assertEqual(["col1", "col2"], called_args["dimensions"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_post_agg_returns_correct_agg_type(self):
         get_post_agg = DruidDatasource.get_post_agg
         # javascript PostAggregators
@@ -553,6 +622,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual(postagg.name, "custom_name")
         self.assertEqual(postagg.post_aggregator["stuff"], "more_stuff")
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_find_postaggs_for_returns_postaggs_and_removes(self):
         find_postaggs_for = DruidDatasource.find_postaggs_for
         postagg_names = set(["pa2", "pa3", "pa4", "m1", "m2", "m3", "m4"])
@@ -573,6 +645,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual(0, len(expected_postaggs))
         self.assertEqual(0, len(postagg_names))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_recursive_get_fields(self):
         conf = {
             "type": "quantile",
@@ -611,6 +686,9 @@ class DruidFuncTestCase(unittest.TestCase):
             expected.remove(field)
         self.assertEqual(0, len(expected))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_metrics_and_post_aggs_tree(self):
         metrics = ["A", "B", "m1", "m2"]
         metrics_dict = {}
@@ -645,6 +723,9 @@ class DruidFuncTestCase(unittest.TestCase):
             del postaggs[chr(i)]
         self.assertEqual(0, len(postaggs))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_metrics_and_post_aggs(self):
         """
         Test generation of metrics and post-aggregations from an initial list
@@ -753,6 +834,9 @@ class DruidFuncTestCase(unittest.TestCase):
         assert set(saved_metrics.keys()) == {"aCustomMetric"}
         assert set(post_aggs.keys()) == result_postaggs
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_druid_type_from_adhoc_metric(self):
 
         druid_type = DruidDatasource.druid_type_from_adhoc_metric(
@@ -800,6 +884,9 @@ class DruidFuncTestCase(unittest.TestCase):
         )
         assert druid_type == "hyperUnique"
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_run_query_order_by_metrics(self):
         client = Mock()
         client.query_builder.last_query.query_dict = {"mock": 0}
@@ -932,6 +1019,9 @@ class DruidFuncTestCase(unittest.TestCase):
         self.assertEqual({"count1", "sum1", "sum2"}, set(aggregations.keys()))
         self.assertEqual({"div1"}, set(post_aggregations.keys()))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_get_aggregations(self):
         ds = DruidDatasource(datasource_name="datasource")
         metrics_dict = {
diff --git a/tests/druid_tests.py b/tests/druid_tests.py
index 59964c6..e275973 100644
--- a/tests/druid_tests.py
+++ b/tests/druid_tests.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# flake8: noqa I202
 """Unit tests for Superset"""
 from datetime import datetime
 import json
@@ -21,12 +22,16 @@ import unittest
 from unittest.mock import Mock, patch
 
 from superset import db, security_manager
-from superset.connectors.druid.models import (
-    DruidCluster,
-    DruidColumn,
-    DruidDatasource,
-    DruidMetric,
-)
+
+try:
+    from superset.connectors.druid.models import (
+        DruidCluster,
+        DruidColumn,
+        DruidDatasource,
+        DruidMetric,
+    )
+except ImportError:
+    pass
 from .base_tests import SupersetTestCase
 
 
@@ -131,6 +136,9 @@ class DruidTests(SupersetTestCase):
 
         return cluster
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     @patch("superset.connectors.druid.models.PyDruid")
     def test_client(self, PyDruid):
         self.login(username="admin")
@@ -189,6 +197,9 @@ class DruidTests(SupersetTestCase):
         resp = self.get_json_resp(url, {"form_data": json.dumps(form_data)})
         self.assertEqual("Canada", resp["data"]["records"][0]["dim1"])
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_druid_sync_from_config(self):
         CLUSTER_NAME = "new_druid"
         self.login()
@@ -276,6 +287,9 @@ class DruidTests(SupersetTestCase):
         )
         assert resp.status_code == 201
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_filter_druid_datasource(self):
         CLUSTER_NAME = "new_druid"
         cluster = self.get_or_create(
@@ -311,6 +325,9 @@ class DruidTests(SupersetTestCase):
         self.assertIn("datasource_for_gamma", resp)
         self.assertNotIn("datasource_not_for_gamma", resp)
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     @patch("superset.connectors.druid.models.PyDruid")
     def test_sync_druid_perm(self, PyDruid):
         self.login(username="admin")
@@ -354,6 +371,9 @@ class DruidTests(SupersetTestCase):
         )
         assert pv is not None
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     @patch("superset.connectors.druid.models.PyDruid")
     def test_refresh_metadata(self, PyDruid):
         self.login(username="admin")
@@ -381,6 +401,9 @@ class DruidTests(SupersetTestCase):
                 json.loads(metric.json)["type"], "double{}".format(agg.capitalize())
             )
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     @patch("superset.connectors.druid.models.PyDruid")
     def test_refresh_metadata_augment_type(self, PyDruid):
         self.login(username="admin")
@@ -413,6 +436,9 @@ class DruidTests(SupersetTestCase):
 
             self.assertEqual(metric.json_obj["type"], "long{}".format(agg.capitalize()))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     @patch("superset.connectors.druid.models.PyDruid")
     def test_refresh_metadata_augment_verbose_name(self, PyDruid):
         self.login(username="admin")
@@ -444,6 +470,9 @@ class DruidTests(SupersetTestCase):
         for metric in metrics:
             self.assertEqual(metric.verbose_name, metric.metric_name)
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_urls(self):
         cluster = self.get_test_cluster_obj()
         self.assertEquals(
@@ -460,6 +489,9 @@ class DruidTests(SupersetTestCase):
             cluster.get_base_broker_url(), "http://localhost:7980/druid/v2"
         )
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     @patch("superset.connectors.druid.models.PyDruid")
     def test_druid_time_granularities(self, PyDruid):
         self.login(username="admin")
@@ -518,6 +550,9 @@ class DruidTests(SupersetTestCase):
                 instance.timeseries.call_args[1]["granularity"]["period"],
             )
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     @patch("superset.connectors.druid.models.PyDruid")
     def test_external_metadata(self, PyDruid):
         self.login(username="admin")
diff --git a/tests/model_tests.py b/tests/model_tests.py
index 9e6afc0..55926cf 100644
--- a/tests/model_tests.py
+++ b/tests/model_tests.py
@@ -27,6 +27,9 @@ from .base_tests import SupersetTestCase
 
 
 class DatabaseModelTestCase(SupersetTestCase):
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("requests"), "requests not installed"
+    )
     def test_database_schema_presto(self):
         sqlalchemy_uri = "presto://presto.airbnb.io:8080/hive/default"
         model = Database(sqlalchemy_uri=sqlalchemy_uri)
diff --git a/tests/schedules_test.py b/tests/schedules_test.py
index 3f7f5ba..9b58394 100644
--- a/tests/schedules_test.py
+++ b/tests/schedules_test.py
@@ -361,12 +361,16 @@ class SchedulesTestCase(unittest.TestCase):
             element.screenshot_as_png,
         )
 
-    @patch("superset.tasks.schedules.requests.get")
+    @patch("superset.tasks.schedules.urllib.request.OpenerDirector.open")
+    @patch("superset.tasks.schedules.urllib.request.urlopen")
     @patch("superset.tasks.schedules.send_email_smtp")
-    def test_deliver_slice_csv_attachment(self, send_email_smtp, get):
+    def test_deliver_slice_csv_attachment(
+        self, send_email_smtp, mock_open, mock_urlopen
+    ):
         response = Mock()
-        get.return_value = response
-        response.raise_for_status.return_value = None
+        mock_open.return_value = response
+        mock_urlopen.return_value = response
+        mock_urlopen.return_value.getcode.return_value = 200
         response.content = self.CSV
 
         schedule = (
@@ -385,12 +389,14 @@ class SchedulesTestCase(unittest.TestCase):
 
         self.assertEquals(send_email_smtp.call_args[1]["data"][file_name], self.CSV)
 
-    @patch("superset.tasks.schedules.requests.get")
+    @patch("superset.tasks.schedules.urllib.request.urlopen")
+    @patch("superset.tasks.schedules.urllib.request.OpenerDirector.open")
     @patch("superset.tasks.schedules.send_email_smtp")
-    def test_deliver_slice_csv_inline(self, send_email_smtp, get):
+    def test_deliver_slice_csv_inline(self, send_email_smtp, mock_open, mock_urlopen):
         response = Mock()
-        get.return_value = response
-        response.raise_for_status.return_value = None
+        mock_open.return_value = response
+        mock_urlopen.return_value = response
+        mock_urlopen.return_value.getcode.return_value = 200
         response.content = self.CSV
 
         schedule = (
diff --git a/tests/security_tests.py b/tests/security_tests.py
index 36046e8..56c222d 100644
--- a/tests/security_tests.py
+++ b/tests/security_tests.py
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 import inspect
+import unittest
 
 from superset import app, appbuilder, security_manager
 from .base_tests import SupersetTestCase
@@ -150,6 +151,9 @@ class RolePermissionTests(SupersetTestCase):
             )
         )
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_is_alpha_only(self):
         self.assertFalse(
             security_manager.is_alpha_only(
@@ -205,11 +209,17 @@ class RolePermissionTests(SupersetTestCase):
         self.assert_cannot_gamma(get_perm_tuples("Gamma"))
         self.assert_cannot_alpha(get_perm_tuples("Alpha"))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_alpha_permissions(self):
         self.assert_can_gamma(get_perm_tuples("Alpha"))
         self.assert_can_alpha(get_perm_tuples("Alpha"))
         self.assert_cannot_alpha(get_perm_tuples("Alpha"))
 
+    @unittest.skipUnless(
+        SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed"
+    )
     def test_admin_permissions(self):
         self.assert_can_gamma(get_perm_tuples("Admin"))
         self.assert_can_alpha(get_perm_tuples("Admin"))


Mime
View raw message