Date: Tue, 24 Oct 2017 21:58:17 +0000
From: maximebeauchemin@apache.org
To: "commits@superset.apache.org"
Reply-To: dev@superset.incubator.apache.org
Subject: [incubator-superset] branch master updated: fixes for bugs in #3689 (#3692)
Message-ID: <150888229751.19255.8534813764174807126@gitbox.apache.org>
MIME-Version: 1.0
Content-Type: text/plain; charset=utf-8
Content-Transfer-Encoding: 8bit
X-Git-Host: gitbox.apache.org
X-Git-Repo: incubator-superset
X-Git-Refname: refs/heads/master
X-Git-Reftype: branch
X-Git-Oldrev: 1d064956296d77352cc5cbd1142aef48566a38bd
X-Git-Newrev: efae14592edaddd4111a859e284aff6a85f76493
X-Git-Rev: efae14592edaddd4111a859e284aff6a85f76493
X-Git-NotificationType: ref_changed_plus_diff
X-Git-Multimail-Version: 1.5.dev
Auto-Submitted: auto-generated

This is an automated email from the ASF dual-hosted git repository.
maximebeauchemin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new efae145  fixes for bugs in #3689 (#3692)
efae145 is described below

commit efae14592edaddd4111a859e284aff6a85f76493
Author: Jeff Niu
AuthorDate: Tue Oct 24 14:58:15 2017 -0700

    fixes for bugs in #3689 (#3692)
---
 .../explore/components/controls/Filter.jsx         |   2 +-
 .../javascripts/explore/components/Filter_spec.jsx |  24 ++++
 superset/connectors/druid/models.py                |  45 +++++---
 tests/druid_tests.py                               | 127 ++++++++++++++++++++-
 4 files changed, 179 insertions(+), 19 deletions(-)

diff --git a/superset/assets/javascripts/explore/components/controls/Filter.jsx b/superset/assets/javascripts/explore/components/controls/Filter.jsx
index 24b9d2f..49a9751 100644
--- a/superset/assets/javascripts/explore/components/controls/Filter.jsx
+++ b/superset/assets/javascripts/explore/components/controls/Filter.jsx
@@ -109,7 +109,7 @@ export default class Filter extends React.Component {

diff --git a/superset/assets/spec/javascripts/explore/components/Filter_spec.jsx b/superset/assets/spec/javascripts/explore/components/Filter_spec.jsx
index 9c045cc..854f940 100644
--- a/superset/assets/spec/javascripts/explore/components/Filter_spec.jsx
+++ b/superset/assets/spec/javascripts/explore/components/Filter_spec.jsx
@@ -88,4 +88,28 @@ describe('Filter', () => {
     const regexWrapper = shallow();
     expect(regexWrapper.find('input')).to.have.lengthOf(1);
   });
+
+  it('renders `input` for text filters', () => {
+    const props = Object.assign({}, defaultProps);
+    ['>=', '>', '<=', '<'].forEach((op) => {
+      props.filter = {
+        col: 'col1',
+        op,
+        value: 'val',
+      };
+      wrapper = shallow();
+      expect(wrapper.find('input')).to.have.lengthOf(1);
+    });
+  });
+
+  it('replaces null filter values with empty string in `input`', () => {
+    const props = Object.assign({}, defaultProps);
+    props.filter = {
+      col: 'col1',
+      op: '>=',
+      value: null,
+    };
+    wrapper = shallow();
+    expect(wrapper.find('input').props().value).to.equal('');
+  });
 });

diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index 376bf7a..28c5e7c 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -18,7 +18,7 @@ from dateutil.parser import parse as dparse
 from pydruid.client import PyDruid
 from pydruid.utils.aggregators import count
-from pydruid.utils.filters import Dimension, Filter
+from pydruid.utils.filters import Dimension, Filter, Bound
 from pydruid.utils.postaggregator import (
     Postaggregator, Quantile, Quantiles, Field, Const, HyperUniqueCardinality,
 )
@@ -966,7 +966,7 @@ class DruidDatasource(Model, BaseDatasource):
             intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
         )

-        filters = self.get_filters(filter)
+        filters = DruidDatasource.get_filters(filter, self.num_cols)
         if filters:
             qry['filter'] = filters

@@ -1103,11 +1103,13 @@ class DruidDatasource(Model, BaseDatasource):
             query=query_str,
             duration=datetime.now() - qry_start_dttm)

-    def get_filters(self, raw_filters):  # noqa
+    @staticmethod
+    def get_filters(raw_filters, num_cols):  # noqa
         filters = None
         for flt in raw_filters:
             if not all(f in flt for f in ['col', 'op', 'val']):
                 continue
+
             col = flt['col']
             op = flt['op']
             eq = flt['val']
@@ -1119,36 +1121,52 @@ class DruidDatasource(Model, BaseDatasource):
                       else types for types in eq]
             elif not isinstance(flt['val'], string_types):
-                eq = eq[0] if len(eq) > 0 else ''
-            if col in self.num_cols:
+                eq = eq[0] if eq and len(eq) > 0 else ''
+
+            is_numeric_col = col in num_cols
+            if is_numeric_col:
                 if op in ('in', 'not in'):
                     eq = [utils.string_to_num(v) for v in eq]
                 else:
                     eq = utils.string_to_num(eq)
+
             if op == '==':
                 cond = Dimension(col) == eq
             elif op == '!=':
-                cond = ~(Dimension(col) == eq)
+                cond = Dimension(col) != eq
             elif op in ('in', 'not in'):
                 fields = []
-                if len(eq) > 1:
+
+                # ignore the filter if it has no value
+                if not len(eq):
+                    continue
+                elif len(eq) == 1:
+                    cond = Dimension(col) == eq[0]
+                else:
                     for s in eq:
                         fields.append(Dimension(col) == s)
                     cond = Filter(type="or", fields=fields)
-                elif len(eq) == 1:
-                    cond = Dimension(col) == eq[0]
+
                 if op == 'not in':
                     cond = ~cond
+
             elif op == 'regex':
                 cond = Filter(type="regex", pattern=eq, dimension=col)
             elif op == '>=':
-                cond = Dimension(col) >= eq
+                cond = Bound(col, eq, None, alphaNumeric=is_numeric_col)
             elif op == '<=':
-                cond = Dimension(col) <= eq
+                cond = Bound(col, None, eq, alphaNumeric=is_numeric_col)
             elif op == '>':
-                cond = Dimension(col) > eq
+                cond = Bound(
+                    col, eq, None,
+                    lowerStrict=True, alphaNumeric=is_numeric_col
+                )
             elif op == '<':
-                cond = Dimension(col) < eq
+                cond = Bound(
+                    col, None, eq,
+                    upperStrict=True, alphaNumeric=is_numeric_col
+                )
+
             if filters:
                 filters = Filter(type="and", fields=[
                     cond,
@@ -1156,6 +1174,7 @@ class DruidDatasource(Model, BaseDatasource):
                 ])
             else:
                 filters = cond
+
         return filters

     def _get_having_obj(self, col, op, eq):

diff --git a/tests/druid_tests.py b/tests/druid_tests.py
index c506ebf..d274691 100644
--- a/tests/druid_tests.py
+++ b/tests/druid_tests.py
@@ -11,7 +11,9 @@ import unittest
 from mock import Mock, patch

 from superset import db, sm, security
-from superset.connectors.druid.models import DruidMetric, DruidCluster, DruidDatasource
+from superset.connectors.druid.models import (
+    DruidMetric, DruidCluster, DruidDatasource
+)
 from superset.connectors.druid.models import PyDruid, Quantile, Postaggregator

 from .base_tests import SupersetTestCase
@@ -306,11 +308,12 @@ class DruidTests(SupersetTestCase):
             metadata_last_refreshed=datetime.now())

         db.session.add(cluster)
-        cluster.get_datasources = PickableMock(return_value=['test_datasource'])
+        cluster.get_datasources = PickableMock(
+            return_value=['test_datasource']
+        )
         cluster.get_druid_version = PickableMock(return_value='0.9.1')
         cluster.refresh_datasources()
-        datasource_id = cluster.datasources[0].id
         cluster.datasources[0].merge_flag = True
         metadata = cluster.datasources[0].latest_metadata()
         self.assertEqual(len(metadata), 4)
@@ -346,12 +349,16 @@ class DruidTests(SupersetTestCase):
                 metric_name='a_histogram',
                 verbose_name='APPROXIMATE_HISTOGRAM(*)',
                 metric_type='approxHistogramFold',
-                json=json.dumps({'type': 'approxHistogramFold', 'name': 'a_histogram'})),
+                json=json.dumps(
+                    {'type': 'approxHistogramFold', 'name': 'a_histogram'})
+            ),
             'aCustomMetric': DruidMetric(
                 metric_name='aCustomMetric',
                 verbose_name='MY_AWESOME_METRIC(*)',
                 metric_type='aCustomType',
-                json=json.dumps({'type': 'customMetric', 'name': 'aCustomMetric'})),
+                json=json.dumps(
+                    {'type': 'customMetric', 'name': 'aCustomMetric'})
+            ),
             'quantile_p95': DruidMetric(
                 metric_name='quantile_p95',
                 verbose_name='P95(*)',
@@ -396,6 +403,116 @@ class DruidTests(SupersetTestCase):
         assert all_metrics == ['aCustomMetric']
         assert set(post_aggs.keys()) == result_postaggs

+    def test_get_filters_ignores_invalid_filter_objects(self):
+        filtr = {'col': 'col1', 'op': '=='}
+        filters = [filtr]
+        self.assertEqual(None, DruidDatasource.get_filters(filters, []))
+
+    def test_get_filters_constructs_filter_in(self):
+        filtr = {'col': 'A', 'op': 'in', 'val': ['a', 'b', 'c']}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertIn('filter', res.filter)
+        self.assertIn('fields', res.filter['filter'])
+        self.assertEqual('or', res.filter['filter']['type'])
+        self.assertEqual(3, len(res.filter['filter']['fields']))
+
+    def test_get_filters_constructs_filter_not_in(self):
+        filtr = {'col': 'A', 'op': 'not in', 'val': ['a', 'b', 'c']}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertIn('filter', res.filter)
+        self.assertIn('type', res.filter['filter'])
+        self.assertEqual('not', res.filter['filter']['type'])
+        self.assertIn('field', res.filter['filter'])
+        self.assertEqual(
+            3,
+            len(res.filter['filter']['field'].filter['filter']['fields'])
+        )
+
+    def test_get_filters_constructs_filter_equals(self):
+        filtr = {'col': 'A', 'op': '==', 'val': 'h'}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertEqual('selector', res.filter['filter']['type'])
+        self.assertEqual('A', res.filter['filter']['dimension'])
+        self.assertEqual('h', res.filter['filter']['value'])
+
+    def test_get_filters_constructs_filter_not_equals(self):
+        filtr = {'col': 'A', 'op': '!=', 'val': 'h'}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertEqual('not', res.filter['filter']['type'])
+        self.assertEqual(
+            'h',
+            res.filter['filter']['field'].filter['filter']['value']
+        )
+
+    def test_get_filters_constructs_bounds_filter(self):
+        filtr = {'col': 'A', 'op': '>=', 'val': 'h'}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertFalse(res.filter['filter']['lowerStrict'])
+        self.assertEqual('A', res.filter['filter']['dimension'])
+        self.assertEqual('h', res.filter['filter']['lower'])
+        self.assertFalse(res.filter['filter']['alphaNumeric'])
+        filtr['op'] = '>'
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertTrue(res.filter['filter']['lowerStrict'])
+        filtr['op'] = '<='
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertFalse(res.filter['filter']['upperStrict'])
+        self.assertEqual('h', res.filter['filter']['upper'])
+        filtr['op'] = '<'
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertTrue(res.filter['filter']['upperStrict'])
+
+    def test_get_filters_constructs_regex_filter(self):
+        filtr = {'col': 'A', 'op': 'regex', 'val': '[abc]'}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertEqual('regex', res.filter['filter']['type'])
+        self.assertEqual('[abc]', res.filter['filter']['pattern'])
+        self.assertEqual('A', res.filter['filter']['dimension'])
+
+    def test_get_filters_composes_multiple_filters(self):
+        filtr1 = {'col': 'A', 'op': '!=', 'val': 'y'}
+        filtr2 = {'col': 'B', 'op': 'in', 'val': ['a', 'b', 'c']}
+        res = DruidDatasource.get_filters([filtr1, filtr2], [])
+        self.assertEqual('and', res.filter['filter']['type'])
+        self.assertEqual(2, len(res.filter['filter']['fields']))
+
+    def test_get_filters_ignores_in_not_in_with_empty_value(self):
+        filtr1 = {'col': 'A', 'op': 'in', 'val': []}
+        filtr2 = {'col': 'A', 'op': 'not in', 'val': []}
+        res = DruidDatasource.get_filters([filtr1, filtr2], [])
+        self.assertEqual(None, res)
+
+    def test_get_filters_constructs_equals_for_in_not_in_single_value(self):
+        filtr = {'col': 'A', 'op': 'in', 'val': ['a']}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertEqual('selector', res.filter['filter']['type'])
+
+    def test_get_filters_handles_arrays_for_string_types(self):
+        filtr = {'col': 'A', 'op': '==', 'val': ['a', 'b']}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertEqual('a', res.filter['filter']['value'])
+        filtr = {'col': 'A', 'op': '==', 'val': []}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertEqual('', res.filter['filter']['value'])
+
+    def test_get_filters_handles_none_for_string_types(self):
+        filtr = {'col': 'A', 'op': '==', 'val': None}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertEqual('', res.filter['filter']['value'])
+
+    def test_get_filters_extracts_values_in_quotes(self):
+        filtr = {'col': 'A', 'op': 'in', 'val': [" 'a' "]}
+        res = DruidDatasource.get_filters([filtr], [])
+        self.assertEqual('a', res.filter['filter']['value'])
+
+    def test_get_filters_converts_strings_to_num(self):
+        filtr = {'col': 'A', 'op': 'in', 'val': ['6']}
+        res = DruidDatasource.get_filters([filtr], ['A'])
+        self.assertEqual(6, res.filter['filter']['value'])
+        filtr = {'col': 'A', 'op': '==', 'val': '6'}
+        res = DruidDatasource.get_filters([filtr], ['A'])
+        self.assertEqual(6, res.filter['filter']['value'])
+

 if __name__ == '__main__':
     unittest.main()

--
To stop receiving notification emails like this one, please
contact ['"commits@superset.apache.org" '].