superset-commits mailing list archives

From: h...@apache.org
Subject: [incubator-superset] branch black-lint updated: run2
Date: Mon, 10 Dec 2018 05:41:41 GMT
This is an automated email from the ASF dual-hosted git repository.

hugh pushed a commit to branch black-lint
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/black-lint by this push:
     new c464cbc  run2
c464cbc is described below

commit c464cbcfe831c773a83adf6afaa202801f7bd8aa
Author: hughhhh <hmiles@lyft.com>
AuthorDate: Sun Dec 9 21:41:23 2018 -0800

    run2
---
 superset/__init__.py                               |   12 +-
 superset/cli.py                                    |  291 +++--
 superset/common/query_context.py                   |   13 +-
 superset/common/query_object.py                    |   27 +-
 superset/config.py                                 |   31 +-
 superset/connectors/base/models.py                 |  101 +-
 superset/connectors/base/views.py                  |   11 +-
 superset/connectors/connector_registry.py          |   22 +-
 superset/connectors/druid/models.py                |  590 ++++-----
 superset/connectors/druid/views.py                 |  164 ++-
 superset/connectors/sqla/models.py                 |  374 +++---
 superset/connectors/sqla/views.py                  |  209 ++--
 superset/data/bart_lines.py                        |    3 +-
 superset/data/birth_names.py                       |  162 +--
 superset/data/countries.py                         |  496 ++++----
 superset/data/country_map.py                       |    3 +-
 superset/data/css_templates.py                     |   15 +-
 superset/data/deck.py                              |  111 +-
 superset/data/energy.py                            |   27 +-
 superset/data/flights.py                           |    7 +-
 superset/data/helpers.py                           |    3 +-
 superset/data/long_lat.py                          |    9 +-
 superset/data/misc_dashboard.py                    |   18 +-
 superset/data/multi_line.py                        |   33 +-
 superset/data/multiformat_time_series.py           |    3 +-
 superset/data/paris.py                             |    3 +-
 superset/data/random_time_series.py                |   16 +-
 superset/data/sf_population_polygons.py            |    3 +-
 superset/data/unicode_test_data.py                 |   14 +-
 superset/data/world_bank.py                        |   86 +-
 superset/dataframe.py                              |   41 +-
 superset/db_engine_specs.py                        |  345 +++---
 superset/db_engines/hive.py                        |    9 +-
 superset/forms.py                                  |  111 +-
 superset/jinja_context.py                          |   11 +-
 superset/legacy.py                                 |   11 +-
 superset/migrations/env.py                         |   33 +-
 .../0c5070e96b57_add_user_attributes_table.py      |   27 +-
 ...9ee0e3_fix_wrong_constraint_on_table_columns.py |   27 +-
 .../versions/1296d28ec131_druid_exports.py         |    4 +-
 .../versions/12d55656cbca_is_featured.py           |    1 -
 .../versions/130915240929_is_sqllab_viz_flow.py    |    1 +
 .../versions/18e88e1cc004_making_audit_nullable.py |  109 +-
 .../versions/1a48a5411020_adding_slug_to_dash.py   |    1 +
 superset/migrations/versions/1d9e835a84f9_.py      |    4 +-
 superset/migrations/versions/1e2841a4128_.py       |    1 +
 .../versions/21e88bc06c02_annotation_migration.py  |   35 +-
 .../migrations/versions/2591d77e9831_user_id.py    |   12 +-
 .../versions/27ae655e4247_make_creator_owners.py   |   22 +-
 .../289ce07647b_add_encrypted_password_field.py    |    9 +-
 .../2929af7925ed_tz_offsets_in_data_sources.py     |    1 +
 .../versions/2fcdcb35e487_saved_queries.py         |   10 +-
 superset/migrations/versions/30bb17c0dc76_.py      |    2 +-
 .../versions/315b3f4da9b0_adding_log_model.py      |    7 +-
 .../versions/33d996bcc382_update_slice_model.py    |    4 +-
 .../versions/3b626e2a6783_sync_db_with_models.py   |   53 +-
 ...1c4c6_migrate_num_period_compare_and_period_.py |   20 +-
 .../41f6a59a61f2_database_options_for_sql_lab.py   |    7 +-
 .../4451805bbaa1_remove_double_percents.py         |    4 +-
 .../versions/4500485bde7d_allow_run_sync_async.py  |    1 -
 ...8b9b7_remove_coordinator_from_druid_cluster_.py |    7 +-
 superset/migrations/versions/4736ec66ce19_.py      |   55 +-
 ...08545_migrate_time_range_for_default_filters.py |   26 +-
 superset/migrations/versions/4e6a06bad7a8_init.py  |  346 +++---
 .../versions/4fa88fe24e94_owners_many_to_many.py   |   14 +-
 .../5e4a03ef0bf0_add_request_access_model.py       |    6 +-
 .../732f1c06bcbf_add_fetch_values_predicate.py     |   11 +-
 .../versions/763d4b211ec9_fixing_audit_fk.py       |  256 ++--
 .../versions/7dbf98566af7_slice_description.py     |    2 +
 .../versions/7e3ddad2a00b_results_key_to_query.py  |    4 +-
 superset/migrations/versions/7fcdcde0761c_.py      |   14 +-
 .../versions/836c0bf75904_cache_timeouts.py        |    4 +-
 superset/migrations/versions/8e80a26a31db_.py      |   21 +-
 .../versions/956a063c52b3_adjusting_key_length.py  |  160 ++-
 superset/migrations/versions/960c69cb1f5b_.py      |   10 +-
 .../versions/a2d606a761d9_adding_favstar_model.py  |    7 +-
 .../versions/a61b40f9f57f_remove_allow_run_sync.py |    3 +-
 ...a65458420354_add_result_backend_time_logging.py |    6 +-
 .../a6c18f869a4e_query_start_running_time.py       |    4 +-
 ...7c195a_rewriting_url_from_shortner_with_new_.py |   15 +-
 ...d66c4246e_add_cache_timeout_to_druid_cluster.py |    3 +-
 .../versions/ad4d656d92bc_add_avg_metric.py        |    9 +-
 .../versions/ad82a75afd82_add_query_model.py       |   23 +-
 .../versions/afb7730f6a9c_remove_empty_filters.py  |    3 +-
 ...dfe5fb6c_adding_verbose_name_to_druid_column.py |    4 +-
 .../b4456560d4f3_change_table_unique_constraint.py |    7 +-
 .../versions/b46fa1b0b39e_add_params_to_tables.py  |    4 +-
 .../versions/bcf3126872fc_add_keyvalue.py          |    9 +-
 .../versions/bddc498dd179_adhoc_filters.py         |    4 +-
 .../bebcf3fed1fe_convert_dashboard_v1_positions.py |  226 ++--
 .../bf706ae5eb46_cal_heatmap_metric_to_metrics.py  |    1 +
 .../c3a8f8611885_materializing_permission.py       |    1 +
 .../versions/d827694c7555_css_templates.py         |   23 +-
 ...d8bc074f7aad_add_new_field_is_restricted_to_.py |   12 +-
 .../db0c65b146bd_update_slice_model_json.py        |    1 +
 .../versions/db527d8c4c78_add_db_verbose_name.py   |    9 +-
 .../versions/ddd6ebdd853b_annotations.py           |   19 +-
 .../versions/e46f2d27a08e_materialize_perms.py     |    5 +-
 .../e502db2af7be_add_template_params_to_tables.py  |    3 +-
 .../versions/e866bd2d4976_smaller_grid.py          |    8 +-
 .../eca4694defa7_sqllab_setting_defaults.py        |    2 +-
 ...bf6129e13_adding_verbose_name_to_tablecolumn.py |    4 +-
 .../versions/f162a1dea4c4_d3format_by_metric.py    |    8 +-
 .../f18570e03440_add_query_result_key_index.py     |    4 +-
 superset/migrations/versions/f1f2d4af5b90_.py      |   11 +-
 superset/migrations/versions/f231d82b9b26_.py      |   19 +-
 superset/models/annotations.py                     |   12 +-
 superset/models/core.py                            |  349 +++---
 superset/models/helpers.py                         |  153 ++-
 superset/models/sql_lab.py                         |   37 +-
 superset/models/user_attributes.py                 |    4 +-
 superset/security.py                               |  210 ++--
 superset/sql_lab.py                                |   97 +-
 superset/sql_parse.py                              |   15 +-
 superset/stats_logger.py                           |   27 +-
 superset/utils/cache.py                            |   11 +-
 superset/utils/core.py                             |  232 ++--
 superset/utils/dashboard_import_export.py          |    3 +-
 superset/utils/dict_import_export.py               |   47 +-
 superset/utils/import_datasource.py                |   30 +-
 superset/views/__init__.py                         |    6 +-
 superset/views/annotations.py                      |   22 +-
 superset/views/base.py                             |   82 +-
 superset/views/core.py                             | 1293 +++++++++++---------
 superset/views/datasource.py                       |   11 +-
 superset/views/sql_lab.py                          |   33 +-
 superset/views/utils.py                            |   13 +-
 superset/viz.py                                    |  692 +++++------
 128 files changed, 4685 insertions(+), 3867 deletions(-)
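
The hunks below are consistent with Black's default 88-character line length with string normalization skipped, since single-quoted literals are preserved throughout. The commit message does not record the exact command used; a run along these lines would produce this style (the target path and the flag choice are assumptions, not taken from the commit):

    pip install black
    black --skip-string-normalization superset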

diff --git a/superset/__init__.py b/superset/__init__.py
index 0a267e8..2a8bb3f 100644
--- a/superset/__init__.py
+++ b/superset/__init__.py
@@ -17,7 +17,10 @@ from superset import config
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.security import SupersetSecurityManager
 from superset.utils.core import (
-    get_update_perms_flag, pessimistic_connection_handling, setup_cache)
+    get_update_perms_flag,
+    pessimistic_connection_handling,
+    setup_cache,
+)
 
 APP_DIR = os.path.dirname(__file__)
 CONFIG_MODULE = os.environ.get('SUPERSET_CONFIG', 'superset.config')
@@ -131,11 +134,13 @@ if app.config.get('ENABLE_TIME_ROTATE'):
         app.config.get('FILENAME'),
         when=app.config.get('ROLLOVER'),
         interval=app.config.get('INTERVAL'),
-        backupCount=app.config.get('BACKUP_COUNT'))
+        backupCount=app.config.get('BACKUP_COUNT'),
+    )
     logging.getLogger().addHandler(handler)
 
 if app.config.get('ENABLE_CORS'):
     from flask_cors import CORS
+
     CORS(app, **app.config.get('CORS_OPTIONS'))
 
 if app.config.get('ENABLE_PROXY_FIX'):
@@ -177,7 +182,8 @@ if not issubclass(custom_sm, SupersetSecurityManager):
     raise Exception(
         """Your CUSTOM_SECURITY_MANAGER must now extend SupersetSecurityManager,
          not FAB's security manager.
-         See [4565] in UPDATING.md""")
+         See [4565] in UPDATING.md"""
+    )
 
 appbuilder = AppBuilder(
     app,
diff --git a/superset/cli.py b/superset/cli.py
index c6444b1..837bad5 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -11,11 +11,8 @@ from pathlib2 import Path
 import werkzeug.serving
 import yaml
 
-from superset import (
-    app, data, db, security_manager,
-)
-from superset.utils import (
-    core as utils, dashboard_import_export, dict_import_export)
+from superset import app, data, db, security_manager
+from superset.utils import core as utils, dashboard_import_export, dict_import_export
 
 config = app.config
 celery_app = utils.get_celery_app(config)
@@ -45,10 +42,7 @@ def debug_run(app, port, use_reloader):
         fg='red',
     )
     click.secho('[example]', fg='yellow')
-    click.secho(
-        'flask run -p 8080 --with-threads --reload --debugger',
-        fg='green',
-    )
+    click.secho('flask run -p 8080 --with-threads --reload --debugger', fg='green')
 
 
 def console_log_run(app, port, use_reloader):
@@ -60,13 +54,13 @@ def console_log_run(app, port, use_reloader):
 
     def run():
         server = pywsgi.WSGIServer(
-            ('0.0.0.0', int(port)),
-            app,
-            handler_class=WebSocketHandler)
+            ('0.0.0.0', int(port)), app, handler_class=WebSocketHandler
+        )
         server.serve_forever()
 
     if use_reloader:
         from gevent import monkey
+
         monkey.patch_all()
         run = werkzeug.serving.run_with_reloader(run)
 
@@ -75,33 +69,66 @@ def console_log_run(app, port, use_reloader):
 
 @app.cli.command()
 @click.option('--debug', '-d', is_flag=True, help='Start the web server in debug mode')
-@click.option('--console-log', is_flag=True,
-              help='Create logger that logs to the browser console (implies -d)')
-@click.option('--no-reload', '-n', 'use_reloader', flag_value=False,
-              default=config.get('FLASK_USE_RELOAD'),
-              help='Don\'t use the reloader in debug mode')
-@click.option('--address', '-a', default=config.get('SUPERSET_WEBSERVER_ADDRESS'),
-              help='Specify the address to which to bind the web server')
-@click.option('--port', '-p', default=config.get('SUPERSET_WEBSERVER_PORT'),
-              help='Specify the port on which to run the web server')
-@click.option('--workers', '-w', default=config.get('SUPERSET_WORKERS', 2),
-              help='Number of gunicorn web server workers to fire up [DEPRECATED]')
-@click.option('--timeout', '-t', default=config.get('SUPERSET_WEBSERVER_TIMEOUT'),
-              help='Specify the timeout (seconds) for the '
-                   'gunicorn web server [DEPRECATED]')
-@click.option('--socket', '-s', default=config.get('SUPERSET_WEBSERVER_SOCKET'),
-              help='Path to a UNIX socket as an alternative to address:port, e.g. '
-                   '/var/run/superset.sock. '
-                   'Will override the address and port values. [DEPRECATED]')
-def runserver(debug, console_log, use_reloader, address, port, timeout, workers, socket):
+@click.option(
+    '--console-log',
+    is_flag=True,
+    help='Create logger that logs to the browser console (implies -d)',
+)
+@click.option(
+    '--no-reload',
+    '-n',
+    'use_reloader',
+    flag_value=False,
+    default=config.get('FLASK_USE_RELOAD'),
+    help='Don\'t use the reloader in debug mode',
+)
+@click.option(
+    '--address',
+    '-a',
+    default=config.get('SUPERSET_WEBSERVER_ADDRESS'),
+    help='Specify the address to which to bind the web server',
+)
+@click.option(
+    '--port',
+    '-p',
+    default=config.get('SUPERSET_WEBSERVER_PORT'),
+    help='Specify the port on which to run the web server',
+)
+@click.option(
+    '--workers',
+    '-w',
+    default=config.get('SUPERSET_WORKERS', 2),
+    help='Number of gunicorn web server workers to fire up [DEPRECATED]',
+)
+@click.option(
+    '--timeout',
+    '-t',
+    default=config.get('SUPERSET_WEBSERVER_TIMEOUT'),
+    help='Specify the timeout (seconds) for the ' 'gunicorn web server [DEPRECATED]',
+)
+@click.option(
+    '--socket',
+    '-s',
+    default=config.get('SUPERSET_WEBSERVER_SOCKET'),
+    help='Path to a UNIX socket as an alternative to address:port, e.g. '
+    '/var/run/superset.sock. '
+    'Will override the address and port values. [DEPRECATED]',
+)
+def runserver(
+    debug, console_log, use_reloader, address, port, timeout, workers, socket
+):
     """Starts a Superset web server."""
     debug = debug or config.get('DEBUG') or console_log
     if debug:
         print(Fore.BLUE + '-=' * 20)
         print(
-            Fore.YELLOW + 'Starting Superset server in ' +
-            Fore.RED + 'DEBUG' +
-            Fore.YELLOW + ' mode')
+            Fore.YELLOW
+            + 'Starting Superset server in '
+            + Fore.RED
+            + 'DEBUG'
+            + Fore.YELLOW
+            + ' mode'
+        )
         print(Fore.BLUE + '-=' * 20)
         print(Style.RESET_ALL)
         if console_log:
@@ -111,8 +138,9 @@ def runserver(debug, console_log, use_reloader, address, port, timeout, workers,
     else:
         logging.info(
             "The Gunicorn 'superset runserver' command is deprecated. Please "
-            "use the 'gunicorn' command instead.")
-        addr_str = ' unix:{socket} ' if socket else' {address}:{port} '
+            "use the 'gunicorn' command instead."
+        )
+        addr_str = ' unix:{socket} ' if socket else ' {address}:{port} '
         cmd = (
             'gunicorn '
             f'-w {workers} '
@@ -133,8 +161,12 @@ def runserver(debug, console_log, use_reloader, address, port, timeout, workers,
 def version(verbose):
     """Prints the current version number"""
     print(Fore.BLUE + '-=' * 15)
-    print(Fore.YELLOW + 'Superset ' + Fore.CYAN + '{version}'.format(
-        version=config.get('VERSION_STRING')))
+    print(
+        Fore.YELLOW
+        + 'Superset '
+        + Fore.CYAN
+        + '{version}'.format(version=config.get('VERSION_STRING'))
+    )
     print(Fore.BLUE + '-=' * 15)
     if verbose:
         print('[DB] : ' + '{}'.format(db.engine))
@@ -201,39 +233,44 @@ def load_examples(load_test_data):
 
 
 @app.cli.command()
-@click.option('--datasource', '-d', help='Specify which datasource name to load, if '
-                                         'omitted, all datasources will be refreshed')
-@click.option('--merge', '-m', is_flag=True, default=False,
-              help='Specify using \'merge\' property during operation. '
-                   'Default value is False.')
+@click.option(
+    '--datasource',
+    '-d',
+    help='Specify which datasource name to load, if '
+    'omitted, all datasources will be refreshed',
+)
+@click.option(
+    '--merge',
+    '-m',
+    is_flag=True,
+    default=False,
+    help='Specify using \'merge\' property during operation. '
+    'Default value is False.',
+)
 def refresh_druid(datasource, merge):
     """Refresh druid datasources"""
     session = db.session()
     from superset.connectors.druid.models import DruidCluster
+
     for cluster in session.query(DruidCluster).all():
         try:
-            cluster.refresh_datasources(datasource_name=datasource,
-                                        merge_flag=merge)
+            cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
         except Exception as e:
-            print(
-                "Error while processing cluster '{}'\n{}".format(
-                    cluster, str(e)))
+            print("Error while processing cluster '{}'\n{}".format(cluster, str(e)))
             logging.exception(e)
         cluster.metadata_last_refreshed = datetime.now()
-        print(
-            'Refreshed metadata from cluster '
-            '[' + cluster.cluster_name + ']')
+        print('Refreshed metadata from cluster ' '[' + cluster.cluster_name + ']')
     session.commit()
 
 
 @app.cli.command()
 @click.option(
-    '--path', '-p',
+    '--path',
+    '-p',
     help='Path to a single JSON file or path containing multiple JSON files'
-         'files to import (*.json)')
-@click.option(
-    '--recursive', '-r',
-    help='recursively search the path for json files')
+    'files to import (*.json)',
+)
+@click.option('--recursive', '-r', help='recursively search the path for json files')
 def import_dashboards(path, recursive=False):
     """Import dashboards from JSON"""
     p = Path(path)
@@ -248,8 +285,7 @@ def import_dashboards(path, recursive=False):
         logging.info('Importing dashboard from file %s', f)
         try:
             with f.open() as data_stream:
-                dashboard_import_export.import_dashboards(
-                    db.session, data_stream)
+                dashboard_import_export.import_dashboards(db.session, data_stream)
         except Exception as e:
             logging.error('Error when importing dashboard from file %s', f)
             logging.error(e)
@@ -257,11 +293,9 @@ def import_dashboards(path, recursive=False):
 
 @app.cli.command()
 @click.option(
-    '--dashboard-file', '-f', default=None,
-    help='Specify the the file to export to')
-@click.option(
-    '--print_stdout', '-p',
-    help='Print JSON to stdout')
+    '--dashboard-file', '-f', default=None, help='Specify the the file to export to'
+)
+@click.option('--print_stdout', '-p', help='Print JSON to stdout')
 def export_dashboards(print_stdout, dashboard_file):
     """Export dashboards to JSON"""
     data = dashboard_import_export.export_dashboards(db.session)
@@ -275,17 +309,21 @@ def export_dashboards(print_stdout, dashboard_file):
 
 @app.cli.command()
 @click.option(
-    '--path', '-p',
+    '--path',
+    '-p',
     help='Path to a single YAML file or path containing multiple YAML '
-         'files to import (*.yaml or *.yml)')
+    'files to import (*.yaml or *.yml)',
+)
 @click.option(
-    '--sync', '-s', 'sync', default='',
+    '--sync',
+    '-s',
+    'sync',
+    default='',
     help='comma seperated list of element types to synchronize '
-         'e.g. "metrics,columns" deletes metrics and columns in the DB '
-         'that are not specified in the YAML file')
-@click.option(
-    '--recursive', '-r',
-    help='recursively search the path for yaml files')
+    'e.g. "metrics,columns" deletes metrics and columns in the DB '
+    'that are not specified in the YAML file',
+)
+@click.option('--recursive', '-r', help='recursively search the path for yaml files')
 def import_datasources(path, sync, recursive=False):
     """Import datasources from YAML"""
     sync_array = sync.split(',')
@@ -304,9 +342,8 @@ def import_datasources(path, sync, recursive=False):
         try:
             with f.open() as data_stream:
                 dict_import_export.import_from_dict(
-                    db.session,
-                    yaml.safe_load(data_stream),
-                    sync=sync_array)
+                    db.session, yaml.safe_load(data_stream), sync=sync_array
+                )
         except Exception as e:
             logging.error('Error when importing datasources from file %s', f)
             logging.error(e)
@@ -314,25 +351,21 @@ def import_datasources(path, sync, recursive=False):
 
 @app.cli.command()
 @click.option(
-    '--datasource-file', '-f', default=None,
-    help='Specify the the file to export to')
-@click.option(
-    '--print_stdout', '-p',
-    help='Print YAML to stdout')
-@click.option(
-    '--back-references', '-b',
-    help='Include parent back references')
-@click.option(
-    '--include-defaults', '-d',
-    help='Include fields containing defaults')
-def export_datasources(print_stdout, datasource_file,
-                       back_references, include_defaults):
+    '--datasource-file', '-f', default=None, help='Specify the the file to export to'
+)
+@click.option('--print_stdout', '-p', help='Print YAML to stdout')
+@click.option('--back-references', '-b', help='Include parent back references')
+@click.option('--include-defaults', '-d', help='Include fields containing defaults')
+def export_datasources(
+    print_stdout, datasource_file, back_references, include_defaults
+):
     """Export datasources to YAML"""
     data = dict_import_export.export_to_dict(
         session=db.session,
         recursive=True,
         back_references=back_references,
-        include_defaults=include_defaults)
+        include_defaults=include_defaults,
+    )
     if print_stdout or not datasource_file:
         yaml.safe_dump(data, stdout, default_flow_style=False)
     if datasource_file:
@@ -342,13 +375,10 @@ def export_datasources(print_stdout, datasource_file,
 
 
 @app.cli.command()
-@click.option(
-    '--back-references', '-b',
-    help='Include parent back references')
+@click.option('--back-references', '-b', help='Include parent back references')
 def export_datasource_schema(back_references):
     """Export datasource YAML schema to stdout"""
-    data = dict_import_export.export_schema_to_dict(
-        back_references=back_references)
+    data = dict_import_export.export_schema_to_dict(back_references=back_references)
     yaml.safe_dump(data, stdout, default_flow_style=False)
 
 
@@ -356,33 +386,37 @@ def export_datasource_schema(back_references):
 def update_datasources_cache():
     """Refresh sqllab datasources cache"""
     from superset.models.core import Database
+
     for database in db.session.query(Database).all():
         if database.allow_multi_schema_metadata_fetch:
             print('Fetching {} datasources ...'.format(database.name))
             try:
                 database.all_table_names_in_database(
-                    force=True, cache=True, cache_timeout=24 * 60 * 60)
+                    force=True, cache=True, cache_timeout=24 * 60 * 60
+                )
                 database.all_view_names_in_database(
-                    force=True, cache=True, cache_timeout=24 * 60 * 60)
+                    force=True, cache=True, cache_timeout=24 * 60 * 60
+                )
             except Exception as e:
                 print('{}'.format(str(e)))
 
 
 @app.cli.command()
 @click.option(
-    '--workers', '-w',
-    type=int,
-    help='Number of celery server workers to fire up')
+    '--workers', '-w', type=int, help='Number of celery server workers to fire up'
+)
 def worker(workers):
     """Starts a Superset worker for async SQL query execution."""
     logging.info(
         "The 'superset worker' command is deprecated. Please use the 'celery "
-        "worker' command instead.")
+        "worker' command instead."
+    )
     if workers:
         celery_app.conf.update(CELERYD_CONCURRENCY=workers)
     elif config.get('SUPERSET_CELERY_WORKERS'):
         celery_app.conf.update(
-            CELERYD_CONCURRENCY=config.get('SUPERSET_CELERY_WORKERS'))
+            CELERYD_CONCURRENCY=config.get('SUPERSET_CELERY_WORKERS')
+        )
 
     worker = celery_app.Worker(optimization='fair')
     worker.start()
@@ -390,13 +424,11 @@ def worker(workers):
 
 @app.cli.command()
 @click.option(
-    '-p', '--port',
-    default='5555',
-    help='Port on which to start the Flower process')
+    '-p', '--port', default='5555', help='Port on which to start the Flower process'
+)
 @click.option(
-    '-a', '--address',
-    default='localhost',
-    help='Address on which to run the service')
+    '-a', '--address', default='localhost', help='Address on which to run the service'
+)
 def flower(port, address):
     """Runs a Celery Flower web server
 
@@ -411,7 +443,8 @@ def flower(port, address):
     )
     logging.info(
         "The 'superset flower' command is deprecated. Please use the 'celery "
-        "flower' command instead.")
+        "flower' command instead."
+    )
     print(Fore.GREEN + 'Starting a Celery Flower instance')
     print(Fore.BLUE + '-=' * 40)
     print(Fore.YELLOW + cmd)
@@ -445,7 +478,8 @@ def load_test_users_run():
         db_perm = utils.get_main_database(security_manager.get_session).perm
         security_manager.merge_perm('database_access', db_perm)
         db_pvm = security_manager.find_permission_view_menu(
-            view_menu_name=db_perm, permission_name='database_access')
+            view_menu_name=db_perm, permission_name='database_access'
+        )
         gamma_sqllab_role.permissions.append(db_pvm)
         for perm in security_manager.find_role('sql_lab').permissions:
             security_manager.add_permission_role(gamma_sqllab_role, perm)
@@ -453,34 +487,55 @@ def load_test_users_run():
         admin = security_manager.find_user('admin')
         if not admin:
             security_manager.add_user(
-                'admin', 'admin', ' user', 'admin@fab.org',
+                'admin',
+                'admin',
+                ' user',
+                'admin@fab.org',
                 security_manager.find_role('Admin'),
-                password='general')
+                password='general',
+            )
 
         gamma = security_manager.find_user('gamma')
         if not gamma:
             security_manager.add_user(
-                'gamma', 'gamma', 'user', 'gamma@fab.org',
+                'gamma',
+                'gamma',
+                'user',
+                'gamma@fab.org',
                 security_manager.find_role('Gamma'),
-                password='general')
+                password='general',
+            )
 
         gamma2 = security_manager.find_user('gamma2')
         if not gamma2:
             security_manager.add_user(
-                'gamma2', 'gamma2', 'user', 'gamma2@fab.org',
+                'gamma2',
+                'gamma2',
+                'user',
+                'gamma2@fab.org',
                 security_manager.find_role('Gamma'),
-                password='general')
+                password='general',
+            )
 
         gamma_sqllab_user = security_manager.find_user('gamma_sqllab')
         if not gamma_sqllab_user:
             security_manager.add_user(
-                'gamma_sqllab', 'gamma_sqllab', 'user', 'gamma_sqllab@fab.org',
-                gamma_sqllab_role, password='general')
+                'gamma_sqllab',
+                'gamma_sqllab',
+                'user',
+                'gamma_sqllab@fab.org',
+                gamma_sqllab_role,
+                password='general',
+            )
 
         alpha = security_manager.find_user('alpha')
         if not alpha:
             security_manager.add_user(
-                'alpha', 'alpha', 'user', 'alpha@fab.org',
+                'alpha',
+                'alpha',
+                'user',
+                'alpha@fab.org',
                 security_manager.find_role('Alpha'),
-                password='general')
+                password='general',
+            )
         security_manager.get_session.commit()
diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index 21b0dac..b35c945 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -11,16 +11,13 @@ class QueryContext:
     The query context contains the query object and additional fields necessary
     to retrieve the data payload for a given viz.
     """
+
     # TODO: Type datasource and query_object dictionary with TypedDict when it becomes
     # a vanilla python type https://github.com/python/mypy/issues/5288
-    def __init__(
-            self,
-            datasource: Dict,
-            queries: List[Dict],
-    ):
-        self.datasource = ConnectorRegistry.get_datasource(datasource.get('type'),
-                                                           int(datasource.get('id')),
-                                                           db.session)
+    def __init__(self, datasource: Dict, queries: List[Dict]):
+        self.datasource = ConnectorRegistry.get_datasource(
+            datasource.get('type'), int(datasource.get('id')), db.session
+        )
         self.queries = list(map(lambda query_obj: QueryObject(**query_obj), queries))
 
     def get_data(self):
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index 8116d26..933a67f 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -14,20 +14,21 @@ class QueryObject:
     The query object's schema matches the interfaces of DB connectors like sqla
     and druid. The query objects are constructed on the client.
     """
+
     def __init__(
-            self,
-            granularity: str,
-            groupby: List[str] = None,
-            metrics: List[Metric] = None,
-            filters: List[str] = None,
-            time_range: Optional[str] = None,
-            time_shift: Optional[str] = None,
-            is_timeseries: bool = False,
-            row_limit: int = app.config.get('ROW_LIMIT'),
-            limit: int = 0,
-            timeseries_limit_metric: Optional[Metric] = None,
-            order_desc: bool = True,
-            extras: Optional[Dict] = None,
+        self,
+        granularity: str,
+        groupby: List[str] = None,
+        metrics: List[Metric] = None,
+        filters: List[str] = None,
+        time_range: Optional[str] = None,
+        time_shift: Optional[str] = None,
+        is_timeseries: bool = False,
+        row_limit: int = app.config.get('ROW_LIMIT'),
+        limit: int = 0,
+        timeseries_limit_metric: Optional[Metric] = None,
+        order_desc: bool = True,
+        extras: Optional[Dict] = None,
     ):
         self.granularity = granularity
         self.from_dttm, self.to_dttm = utils.get_since_until(time_range, time_shift)
diff --git a/superset/config.py b/superset/config.py
index 1613e75..392663e 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -207,9 +207,7 @@ ALLOWED_EXTENSIONS = set(['csv'])
 
 # CSV Options: key/value pairs that will be passed as argument to DataFrame.to_csv method
 # note: index option should not be overridden
-CSV_EXPORT = {
-    'encoding': 'utf-8',
-}
+CSV_EXPORT = {'encoding': 'utf-8'}
 
 # ---------------------------------------------------
 # Time grain configurations
@@ -252,10 +250,12 @@ DRUID_DATA_SOURCE_BLACKLIST = []
 # --------------------------------------------------
 # Modules, datasources and middleware to be registered
 # --------------------------------------------------
-DEFAULT_MODULE_DS_MAP = OrderedDict([
-    ('superset.connectors.sqla.models', ['SqlaTable']),
-    ('superset.connectors.druid.models', ['DruidDatasource']),
-])
+DEFAULT_MODULE_DS_MAP = OrderedDict(
+    [
+        ('superset.connectors.sqla.models', ['SqlaTable']),
+        ('superset.connectors.druid.models', ['DruidDatasource']),
+    ]
+)
 ADDITIONAL_MODULE_DS_MAP = {}
 ADDITIONAL_MIDDLEWARE = []
 
@@ -467,12 +467,15 @@ try:
     if CONFIG_PATH_ENV_VAR in os.environ:
         # Explicitly import config module that is not in pythonpath; useful
         # for case where app is being executed via pex.
-        print('Loaded your LOCAL configuration at [{}]'.format(
-            os.environ[CONFIG_PATH_ENV_VAR]))
+        print(
+            'Loaded your LOCAL configuration at [{}]'.format(
+                os.environ[CONFIG_PATH_ENV_VAR]
+            )
+        )
         module = sys.modules[__name__]
         override_conf = imp.load_source(
-            'superset_config',
-            os.environ[CONFIG_PATH_ENV_VAR])
+            'superset_config', os.environ[CONFIG_PATH_ENV_VAR]
+        )
         for key in dir(override_conf):
             if key.isupper():
                 setattr(module, key, getattr(override_conf, key))
@@ -480,7 +483,9 @@ try:
     else:
         from superset_config import *  # noqa
         import superset_config
-        print('Loaded your LOCAL configuration at [{}]'.format(
-            superset_config.__file__))
+
+        print(
+            'Loaded your LOCAL configuration at [{}]'.format(superset_config.__file__)
+        )
 except ImportError:
     pass
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index 216ed9e..3260cfe 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -2,9 +2,7 @@
 import json
 
 from past.builtins import basestring
-from sqlalchemy import (
-    and_, Boolean, Column, Integer, String, Text,
-)
+from sqlalchemy import and_, Boolean, Column, Integer, String, Text
 from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.orm import foreign, relationship
 
@@ -119,11 +117,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
 
     @property
     def column_formats(self):
-        return {
-            m.metric_name: m.d3format
-            for m in self.metrics
-            if m.d3format
-        }
+        return {m.metric_name: m.d3format for m in self.metrics if m.d3format}
 
     def add_missing_metrics(self, metrics):
         exisiting_metrics = {m.metric_name for m in self.metrics}
@@ -137,8 +131,10 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
         return sorted(
             [
                 (m.metric_name, m.verbose_name or m.metric_name or '')
-                for m in self.metrics],
-            key=lambda x: x[1])
+                for m in self.metrics
+            ],
+            key=lambda x: x[1],
+        )
 
     @property
     def short_data(self):
@@ -167,14 +163,12 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
             order_by_choices.append((json.dumps([s, False]), s + ' [desc]'))
 
         verbose_map = {'__timestamp': 'Time'}
-        verbose_map.update({
-            o.metric_name: o.verbose_name or o.metric_name
-            for o in self.metrics
-        })
-        verbose_map.update({
-            o.column_name: o.verbose_name or o.column_name
-            for o in self.columns
-        })
+        verbose_map.update(
+            {o.metric_name: o.verbose_name or o.metric_name for o in self.metrics}
+        )
+        verbose_map.update(
+            {o.column_name: o.verbose_name or o.column_name for o in self.columns}
+        )
         return {
             # simple fields
             'id': self.id,
@@ -192,10 +186,8 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
             'cache_timeout': self.cache_timeout,
             'params': self.params,
             'perm': self.perm,
-
             # sqla-specific
             'sql': self.sql,
-
             # computed fields
             'all_cols': utils.choicify(self.column_names),
             'columns': [o.data for o in self.columns],
@@ -212,7 +204,8 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
 
     @staticmethod
     def filter_values_handler(
-            values, target_column_is_numeric=False, is_list_target=False):
+        values, target_column_is_numeric=False, is_list_target=False
+    ):
         def handle_single_value(v):
             # backward compatibility with previous <select> components
             if isinstance(v, basestring):
@@ -226,6 +219,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
                 elif v == '<empty string>':
                     return ''
             return v
+
         if isinstance(values, (list, tuple)):
             values = [handle_single_value(v) for v in values]
         else:
@@ -274,8 +268,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
             if col.column_name == column_name:
                 return col
 
-    def get_fk_many_from_list(
-            self, object_list, fkmany, fkmany_class, key_attr):
+    def get_fk_many_from_list(self, object_list, fkmany, fkmany_class, key_attr):
         """Update ORM one-to-many list from object list
 
         Used for syncing metrics and columns using the same code"""
@@ -301,10 +294,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
                 del obj['id']
                 orm_kwargs = {}
                 for k in obj:
-                    if (
-                        k in fkmany_class.update_from_object_fields and
-                        k in obj
-                    ):
+                    if k in fkmany_class.update_from_object_fields and k in obj:
                         orm_kwargs[k] = obj[k]
                 new_obj = fkmany_class(**orm_kwargs)
                 new_fks.append(new_obj)
@@ -329,12 +319,14 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
 
         # Syncing metrics
         metrics = self.get_fk_many_from_list(
-            obj.get('metrics'), self.metrics, self.metric_class, 'metric_name')
+            obj.get('metrics'), self.metrics, self.metric_class, 'metric_name'
+        )
         self.metrics = metrics
 
         # Syncing columns
         self.columns = self.get_fk_many_from_list(
-            obj.get('columns'), self.columns, self.column_class, 'column_name')
+            obj.get('columns'), self.columns, self.column_class, 'column_name'
+        )
 
 
 class BaseColumn(AuditMixinNullable, ImportMixin):
@@ -364,32 +356,30 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
         return self.column_name
 
     num_types = (
-        'DOUBLE', 'FLOAT', 'INT', 'BIGINT',
-        'LONG', 'REAL', 'NUMERIC', 'DECIMAL', 'MONEY',
+        'DOUBLE',
+        'FLOAT',
+        'INT',
+        'BIGINT',
+        'LONG',
+        'REAL',
+        'NUMERIC',
+        'DECIMAL',
+        'MONEY',
     )
     date_types = ('DATE', 'TIME', 'DATETIME')
     str_types = ('VARCHAR', 'STRING', 'CHAR')
 
     @property
     def is_num(self):
-        return (
-            self.type and
-            any([t in self.type.upper() for t in self.num_types])
-        )
+        return self.type and any([t in self.type.upper() for t in self.num_types])
 
     @property
     def is_time(self):
-        return (
-            self.type and
-            any([t in self.type.upper() for t in self.date_types])
-        )
+        return self.type and any([t in self.type.upper() for t in self.date_types])
 
     @property
     def is_string(self):
-        return (
-            self.type and
-            any([t in self.type.upper() for t in self.str_types])
-        )
+        return self.type and any([t in self.type.upper() for t in self.str_types])
 
     @property
     def expression(self):
@@ -398,9 +388,17 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
     @property
     def data(self):
         attrs = (
-            'id', 'column_name', 'verbose_name', 'description', 'expression',
-            'filterable', 'groupby', 'is_dttm', 'type',
-            'database_expression', 'python_date_format',
+            'id',
+            'column_name',
+            'verbose_name',
+            'description',
+            'expression',
+            'filterable',
+            'groupby',
+            'is_dttm',
+            'type',
+            'database_expression',
+            'python_date_format',
         )
         return {s: getattr(self, s) for s in attrs if hasattr(self, s)}
 
@@ -433,6 +431,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
         backref=backref('metrics', cascade='all, delete-orphan'),
         enable_typechecks=False)
     """
+
     @property
     def perm(self):
         raise NotImplementedError()
@@ -444,6 +443,12 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
     @property
     def data(self):
         attrs = (
-            'id', 'metric_name', 'verbose_name', 'description', 'expression',
-            'warning_text', 'd3format')
+            'id',
+            'metric_name',
+            'verbose_name',
+            'description',
+            'expression',
+            'warning_text',
+            'd3format',
+        )
         return {s: getattr(self, s) for s in attrs}
diff --git a/superset/connectors/base/views.py b/superset/connectors/base/views.py
index a77177e..9eccd8d 100644
--- a/superset/connectors/base/views.py
+++ b/superset/connectors/base/views.py
@@ -8,7 +8,10 @@ from superset.views.base import SupersetModelView
 class DatasourceModelView(SupersetModelView):
     def pre_delete(self, obj):
         if obj.slices:
-            raise SupersetException(Markup(
-                'Cannot delete a datasource that has slices attached to it.'
-                "Here's the list of associated charts: " +
-                ''.join([o.slice_link for o in obj.slices])))
+            raise SupersetException(
+                Markup(
+                    'Cannot delete a datasource that has slices attached to it.'
+                    "Here's the list of associated charts: "
+                    + ''.join([o.slice_link for o in obj.slices])
+                )
+            )
diff --git a/superset/connectors/connector_registry.py b/superset/connectors/connector_registry.py
index 258d2f5..626986a 100644
--- a/superset/connectors/connector_registry.py
+++ b/superset/connectors/connector_registry.py
@@ -35,15 +35,21 @@ class ConnectorRegistry(object):
         return datasources
 
     @classmethod
-    def get_datasource_by_name(cls, session, datasource_type, datasource_name,
-                               schema, database_name):
+    def get_datasource_by_name(
+        cls, session, datasource_type, datasource_name, schema, database_name
+    ):
         datasource_class = ConnectorRegistry.sources[datasource_type]
         datasources = session.query(datasource_class).all()
 
         # Filter datasoures that don't have database.
-        db_ds = [d for d in datasources if d.database and
-                 d.database.name == database_name and
-                 d.name == datasource_name and schema == schema]
+        db_ds = [
+            d
+            for d in datasources
+            if d.database
+            and d.database.name == database_name
+            and d.name == datasource_name
+            and schema == schema
+        ]
         return db_ds[0]
 
     @classmethod
@@ -71,8 +77,8 @@ class ConnectorRegistry(object):
         )
 
     @classmethod
-    def query_datasources_by_name(
-            cls, session, database, datasource_name, schema=None):
+    def query_datasources_by_name(cls, session, database, datasource_name, schema=None):
         datasource_class = ConnectorRegistry.sources[database.type]
         return datasource_class.query_datasources_by_name(
-            session, database, datasource_name, schema=None)
+            session, database, datasource_name, schema=None
+        )
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index 937c8d8..14ccf2c 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -21,25 +21,33 @@ from pydruid.utils.dimensions import MapLookupExtraction, RegexExtraction
 from pydruid.utils.filters import Dimension, Filter
 from pydruid.utils.having import Aggregation
 from pydruid.utils.postaggregator import (
-    Const, Field, HyperUniqueCardinality, Postaggregator, Quantile, Quantiles,
+    Const,
+    Field,
+    HyperUniqueCardinality,
+    Postaggregator,
+    Quantile,
+    Quantiles,
 )
 import requests
 import sqlalchemy as sa
 from sqlalchemy import (
-    Boolean, Column, DateTime, ForeignKey, Integer, String, Text, UniqueConstraint,
+    Boolean,
+    Column,
+    DateTime,
+    ForeignKey,
+    Integer,
+    String,
+    Text,
+    UniqueConstraint,
 )
 from sqlalchemy.orm import backref, relationship
 
 from superset import conf, db, security_manager
 from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
 from superset.exceptions import MetricPermException, SupersetException
-from superset.models.helpers import (
-    AuditMixinNullable, ImportMixin, QueryResult,
-)
+from superset.models.helpers import AuditMixinNullable, ImportMixin, QueryResult
 from superset.utils import core as utils, import_datasource
-from superset.utils.core import (
-    DimSelector, DTTM_ALIAS, flasher,
-)
+from superset.utils.core import DimSelector, DTTM_ALIAS, flasher
 
 DRUID_TZ = conf.get('DRUID_TZ')
 POST_AGG_TYPE = 'postagg'
@@ -64,6 +72,7 @@ class JavascriptPostAggregator(Postaggregator):
 
 class CustomPostAggregator(Postaggregator):
     """A way to allow users to specify completely custom PostAggregators"""
+
     def __init__(self, name, post_aggregator):
         self.name = name
         self.post_aggregator = post_aggregator
@@ -86,8 +95,13 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
     metadata_last_refreshed = Column(DateTime)
     cache_timeout = Column(Integer)
 
-    export_fields = ('cluster_name', 'broker_host', 'broker_port',
-                     'broker_endpoint', 'cache_timeout')
+    export_fields = (
+        'cluster_name',
+        'broker_host',
+        'broker_port',
+        'broker_endpoint',
+        'cache_timeout',
+    )
     update_from_object_fields = export_fields
     export_children = ['datasources']
 
@@ -99,11 +113,7 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
 
     @property
     def data(self):
-        return {
-            'id': self.id,
-            'name': self.cluster_name,
-            'backend': 'druid',
-        }
+        return {'id': self.id, 'name': self.cluster_name, 'backend': 'druid'}
 
     @staticmethod
     def get_base_url(host, port):
@@ -114,14 +124,13 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
         return url
 
     def get_base_broker_url(self):
-        base_url = self.get_base_url(
-            self.broker_host, self.broker_port)
+        base_url = self.get_base_url(self.broker_host, self.broker_port)
         return f'{base_url}/{self.broker_endpoint}'
 
     def get_pydruid_client(self):
         cli = PyDruid(
-            self.get_base_url(self.broker_host, self.broker_port),
-            self.broker_endpoint)
+            self.get_base_url(self.broker_host, self.broker_port), self.broker_endpoint
+        )
         return cli
 
     def get_datasources(self):
@@ -129,8 +138,7 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
         return json.loads(requests.get(endpoint).text)
 
     def get_druid_version(self):
-        endpoint = self.get_base_url(
-            self.broker_host, self.broker_port) + '/status'
+        endpoint = self.get_base_url(self.broker_host, self.broker_port) + '/status'
         return json.loads(requests.get(endpoint).text)['version']
 
     @property
@@ -139,10 +147,8 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
         return self.get_druid_version()
 
     def refresh_datasources(
-            self,
-            datasource_name=None,
-            merge_flag=True,
-            refreshAll=True):
+        self, datasource_name=None, merge_flag=True, refreshAll=True
+    ):
         """Refresh metadata of all datasources in the cluster
         If ``datasource_name`` is specified, only that datasource is updated
         """
@@ -175,12 +181,10 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
                 datasource = DruidDatasource(datasource_name=ds_name)
                 with session.no_autoflush:
                     session.add(datasource)
-                flasher(
-                    _('Adding new datasource [{}]').format(ds_name), 'success')
+                flasher(_('Adding new datasource [{}]').format(ds_name), 'success')
                 ds_map[ds_name] = datasource
             elif refreshAll:
-                flasher(
-                    _('Refreshing datasource [{}]').format(ds_name), 'info')
+                flasher(_('Refreshing datasource [{}]').format(ds_name), 'info')
             else:
                 del ds_map[ds_name]
                 continue
@@ -211,8 +215,8 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
                     col_obj = col_objs.get(col)
                     if not col_obj:
                         col_obj = DruidColumn(
-                            datasource_id=datasource.id,
-                            column_name=col)
+                            datasource_id=datasource.id, column_name=col
+                        )
                         with session.no_autoflush:
                             session.add(col_obj)
                     col_obj.type = cols[col]['type']
@@ -251,20 +255,30 @@ class DruidColumn(Model, BaseColumn):
     __tablename__ = 'columns'
     __table_args__ = (UniqueConstraint('column_name', 'datasource_id'),)
 
-    datasource_id = Column(
-        Integer,
-        ForeignKey('datasources.id'))
+    datasource_id = Column(Integer, ForeignKey('datasources.id'))
     # Setting enable_typechecks=False disables polymorphic inheritance.
     datasource = relationship(
         'DruidDatasource',
         backref=backref('columns', cascade='all, delete-orphan'),
-        enable_typechecks=False)
+        enable_typechecks=False,
+    )
     dimension_spec_json = Column(Text)
 
     export_fields = (
-        'datasource_id', 'column_name', 'is_active', 'type', 'groupby',
-        'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
-        'description', 'dimension_spec_json', 'verbose_name',
+        'datasource_id',
+        'column_name',
+        'is_active',
+        'type',
+        'groupby',
+        'count_distinct',
+        'sum',
+        'avg',
+        'max',
+        'min',
+        'filterable',
+        'description',
+        'dimension_spec_json',
+        'verbose_name',
     )
     update_from_object_fields = export_fields
     export_parent = 'datasource'
@@ -302,8 +316,9 @@ class DruidColumn(Model, BaseColumn):
                 metric_name=name,
                 metric_type='sum',
                 verbose_name='SUM({})'.format(self.column_name),
-                json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name}),
+                json=json.dumps(
+                    {'type': mt, 'name': name, 'fieldName': self.column_name}
+                ),
             )
 
         if self.avg and self.is_num:
@@ -313,8 +328,9 @@ class DruidColumn(Model, BaseColumn):
                 metric_name=name,
                 metric_type='avg',
                 verbose_name='AVG({})'.format(self.column_name),
-                json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name}),
+                json=json.dumps(
+                    {'type': mt, 'name': name, 'fieldName': self.column_name}
+                ),
             )
 
         if self.min and self.is_num:
@@ -324,8 +340,9 @@ class DruidColumn(Model, BaseColumn):
                 metric_name=name,
                 metric_type='min',
                 verbose_name='MIN({})'.format(self.column_name),
-                json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name}),
+                json=json.dumps(
+                    {'type': mt, 'name': name, 'fieldName': self.column_name}
+                ),
             )
         if self.max and self.is_num:
             mt = corrected_type.lower() + 'Max'
@@ -334,8 +351,9 @@ class DruidColumn(Model, BaseColumn):
                 metric_name=name,
                 metric_type='max',
                 verbose_name='MAX({})'.format(self.column_name),
-                json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name}),
+                json=json.dumps(
+                    {'type': mt, 'name': name, 'fieldName': self.column_name}
+                ),
             )
         if self.count_distinct:
             name = 'count_distinct__' + self.column_name
@@ -344,21 +362,22 @@ class DruidColumn(Model, BaseColumn):
                     metric_name=name,
                     verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
                     metric_type=self.type,
-                    json=json.dumps({
-                        'type': self.type,
-                        'name': name,
-                        'fieldName': self.column_name,
-                    }),
+                    json=json.dumps(
+                        {'type': self.type, 'name': name, 'fieldName': self.column_name}
+                    ),
                 )
             else:
                 metrics[name] = DruidMetric(
                     metric_name=name,
                     verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
                     metric_type='count_distinct',
-                    json=json.dumps({
-                        'type': 'cardinality',
-                        'name': name,
-                        'fieldNames': [self.column_name]}),
+                    json=json.dumps(
+                        {
+                            'type': 'cardinality',
+                            'name': name,
+                            'fieldNames': [self.column_name],
+                        }
+                    ),
                 )
         return metrics
 
@@ -384,9 +403,14 @@ class DruidColumn(Model, BaseColumn):
     @classmethod
     def import_obj(cls, i_column):
         def lookup_obj(lookup_column):
-            return db.session.query(DruidColumn).filter(
-                DruidColumn.datasource_id == lookup_column.datasource_id,
-                DruidColumn.column_name == lookup_column.column_name).first()
+            return (
+                db.session.query(DruidColumn)
+                .filter(
+                    DruidColumn.datasource_id == lookup_column.datasource_id,
+                    DruidColumn.column_name == lookup_column.column_name,
+                )
+                .first()
+            )
 
         return import_datasource.import_simple_obj(db.session, i_column, lookup_obj)
 
@@ -397,19 +421,25 @@ class DruidMetric(Model, BaseMetric):
 
     __tablename__ = 'metrics'
     __table_args__ = (UniqueConstraint('metric_name', 'datasource_id'),)
-    datasource_id = Column(
-        Integer,
-        ForeignKey('datasources.id'))
+    datasource_id = Column(Integer, ForeignKey('datasources.id'))
     # Setting enable_typechecks=False disables polymorphic inheritance.
     datasource = relationship(
         'DruidDatasource',
         backref=backref('metrics', cascade='all, delete-orphan'),
-        enable_typechecks=False)
+        enable_typechecks=False,
+    )
     json = Column(Text)
 
     export_fields = (
-        'metric_name', 'verbose_name', 'metric_type', 'datasource_id',
-        'json', 'description', 'is_restricted', 'd3format', 'warning_text',
+        'metric_name',
+        'verbose_name',
+        'metric_type',
+        'datasource_id',
+        'json',
+        'description',
+        'is_restricted',
+        'd3format',
+        'warning_text',
     )
     update_from_object_fields = export_fields
     export_parent = 'datasource'
@@ -429,10 +459,12 @@ class DruidMetric(Model, BaseMetric):
     @property
     def perm(self):
         return (
-            '{parent_name}.[{obj.metric_name}](id:{obj.id})'
-        ).format(obj=self,
-                 parent_name=self.datasource.full_name,
-                 ) if self.datasource else None
+            ('{parent_name}.[{obj.metric_name}](id:{obj.id})').format(
+                obj=self, parent_name=self.datasource.full_name
+            )
+            if self.datasource
+            else None
+        )
 
     def get_perm(self):
         return self.perm
@@ -440,9 +472,15 @@ class DruidMetric(Model, BaseMetric):
     @classmethod
     def import_obj(cls, i_metric):
         def lookup_obj(lookup_metric):
-            return db.session.query(DruidMetric).filter(
-                DruidMetric.datasource_id == lookup_metric.datasource_id,
-                DruidMetric.metric_name == lookup_metric.metric_name).first()
+            return (
+                db.session.query(DruidMetric)
+                .filter(
+                    DruidMetric.datasource_id == lookup_metric.datasource_id,
+                    DruidMetric.metric_name == lookup_metric.metric_name,
+                )
+                .first()
+            )
+
         return import_datasource.import_simple_obj(db.session, i_metric, lookup_obj)
 
 
@@ -466,20 +504,27 @@ class DruidDatasource(Model, BaseDatasource):
     is_hidden = Column(Boolean, default=False)
     filter_select_enabled = Column(Boolean, default=True)  # override default
     fetch_values_from = Column(String(100))
-    cluster_name = Column(
-        String(250), ForeignKey('clusters.cluster_name'))
+    cluster_name = Column(String(250), ForeignKey('clusters.cluster_name'))
     cluster = relationship(
-        'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
+        'DruidCluster', backref='datasources', foreign_keys=[cluster_name]
+    )
     user_id = Column(Integer, ForeignKey('ab_user.id'))
     owner = relationship(
         security_manager.user_model,
         backref=backref('datasources', cascade='all, delete-orphan'),
-        foreign_keys=[user_id])
+        foreign_keys=[user_id],
+    )
     UniqueConstraint('cluster_name', 'datasource_name')
 
     export_fields = (
-        'datasource_name', 'is_hidden', 'description', 'default_endpoint',
-        'cluster_name', 'offset', 'cache_timeout', 'params',
+        'datasource_name',
+        'is_hidden',
+        'description',
+        'default_endpoint',
+        'cluster_name',
+        'offset',
+        'cache_timeout',
+        'params',
         'filter_select_enabled',
     )
     update_from_object_fields = export_fields
@@ -518,9 +563,9 @@ class DruidDatasource(Model, BaseDatasource):
         return security_manager.get_schema_perm(self.cluster, self.schema)
 
     def get_perm(self):
-        return (
-            '[{obj.cluster_name}].[{obj.datasource_name}]'
-            '(id:{obj.id})').format(obj=self)
+        return ('[{obj.cluster_name}].[{obj.datasource_name}]' '(id:{obj.id})').format(
+            obj=self
+        )
 
     def update_from_object(self, obj):
         return NotImplementedError()
@@ -532,17 +577,28 @@ class DruidDatasource(Model, BaseDatasource):
 
     @property
     def full_name(self):
-        return utils.get_datasource_full_name(
-            self.cluster_name, self.datasource_name)
+        return utils.get_datasource_full_name(self.cluster_name, self.datasource_name)
 
     @property
     def time_column_grains(self):
         return {
             'time_columns': [
-                'all', '5 seconds', '30 seconds', '1 minute', '5 minutes',
-                '30 minutes', '1 hour', '6 hour', '1 day', '7 days',
-                'week', 'week_starting_sunday', 'week_ending_saturday',
-                'month', 'quarter', 'year',
+                'all',
+                '5 seconds',
+                '30 seconds',
+                '1 minute',
+                '5 minutes',
+                '30 minutes',
+                '1 hour',
+                '6 hour',
+                '1 day',
+                '7 days',
+                'week',
+                'week_starting_sunday',
+                'week_ending_saturday',
+                'month',
+                'quarter',
+                'year',
             ],
             'time_grains': ['now'],
         }
@@ -557,10 +613,7 @@ class DruidDatasource(Model, BaseDatasource):
         return Markup(f'<a href="{url}">{name}</a>')
 
     def get_metric_obj(self, metric_name):
-        return [
-            m.json_obj for m in self.metrics
-            if m.metric_name == metric_name
-        ][0]
+        return [m.json_obj for m in self.metrics if m.metric_name == metric_name][0]
 
     @classmethod
     def import_obj(cls, i_datasource, import_time=None):
@@ -570,18 +623,27 @@ class DruidDatasource(Model, BaseDatasource):
          This function can be used to import/export dashboards between multiple
          superset instances. Audit metadata isn't copied over.
         """
+
         def lookup_datasource(d):
-            return db.session.query(DruidDatasource).filter(
-                DruidDatasource.datasource_name == d.datasource_name,
-                DruidCluster.cluster_name == d.cluster_name,
-            ).first()
+            return (
+                db.session.query(DruidDatasource)
+                .filter(
+                    DruidDatasource.datasource_name == d.datasource_name,
+                    DruidCluster.cluster_name == d.cluster_name,
+                )
+                .first()
+            )
 
         def lookup_cluster(d):
-            return db.session.query(DruidCluster).filter_by(
-                cluster_name=d.cluster_name).one()
+            return (
+                db.session.query(DruidCluster)
+                .filter_by(cluster_name=d.cluster_name)
+                .one()
+            )
+
         return import_datasource.import_datasource(
-            db.session, i_datasource, lookup_cluster, lookup_datasource,
-            import_time)
+            db.session, i_datasource, lookup_cluster, lookup_datasource, import_time
+        )
 
     def latest_metadata(self):
         """Returns segment metadata from the latest segment"""
@@ -611,7 +673,8 @@ class DruidDatasource(Model, BaseDatasource):
                 datasource=self.datasource_name,
                 intervals=lbound + '/' + rbound,
                 merge=self.merge_flag,
-                analysisTypes=[])
+                analysisTypes=[],
+            )
         except Exception as e:
             logging.warning('Failed first attempt to get latest segment')
             logging.exception(e)
@@ -627,7 +690,8 @@ class DruidDatasource(Model, BaseDatasource):
                     datasource=self.datasource_name,
                     intervals=lbound + '/' + rbound,
                     merge=self.merge_flag,
-                    analysisTypes=[])
+                    analysisTypes=[],
+                )
             except Exception as e:
                 logging.warning('Failed 2nd attempt to get latest segment')
                 logging.exception(e)
@@ -639,18 +703,11 @@ class DruidDatasource(Model, BaseDatasource):
             col.refresh_metrics()
 
     @classmethod
-    def sync_to_db_from_config(
-            cls,
-            druid_config,
-            user,
-            cluster,
-            refresh=True):
+    def sync_to_db_from_config(cls, druid_config, user, cluster, refresh=True):
         """Merges the ds config from druid_config into one stored in the db."""
         session = db.session
         datasource = (
-            session.query(cls)
-            .filter_by(datasource_name=druid_config['name'])
-            .first()
+            session.query(cls).filter_by(datasource_name=druid_config['name']).first()
         )
         # Create a new datasource.
         if not datasource:
@@ -689,9 +746,11 @@ class DruidDatasource(Model, BaseDatasource):
         metric_objs = (
             session.query(DruidMetric)
             .filter(DruidMetric.datasource_id == datasource.id)
-            .filter(DruidMetric.metric_name.in_(
-                spec['name'] for spec in druid_config['metrics_spec']
-            ))
+            .filter(
+                DruidMetric.metric_name.in_(
+                    spec['name'] for spec in druid_config['metrics_spec']
+                )
+            )
         )
         metric_objs = {metric.metric_name: metric for metric in metric_objs}
         for metric_spec in druid_config['metrics_spec']:
@@ -701,11 +760,9 @@ class DruidDatasource(Model, BaseDatasource):
 
             if metric_type == 'count':
                 metric_type = 'longSum'
-                metric_json = json.dumps({
-                    'type': 'longSum',
-                    'name': metric_name,
-                    'fieldName': metric_name,
-                })
+                metric_json = json.dumps(
+                    {'type': 'longSum', 'name': metric_name, 'fieldName': metric_name}
+                )
 
             metric_obj = metric_objs.get(metric_name, None)
             if not metric_obj:
@@ -716,8 +773,9 @@ class DruidDatasource(Model, BaseDatasource):
                     datasource=datasource,
                     json=metric_json,
                     description=(
-                        'Imported from the airolap config dir for %s' %
-                        druid_config['name']),
+                        'Imported from the airolap config dir for %s'
+                        % druid_config['name']
+                    ),
                 )
                 session.add(metric_obj)
         session.commit()
@@ -775,8 +833,9 @@ class DruidDatasource(Model, BaseDatasource):
             granularity['period'] = period_name
         else:
             granularity['type'] = 'duration'
-            granularity['duration'] = utils.parse_human_timedelta(
-                period_name).total_seconds() * 1000
+            granularity['duration'] = (
+                utils.parse_human_timedelta(period_name).total_seconds() * 1000
+            )
         return granularity
 
     @staticmethod
@@ -789,43 +848,31 @@ class DruidDatasource(Model, BaseDatasource):
             return JavascriptPostAggregator(
                 name=mconf.get('name', ''),
                 field_names=mconf.get('fieldNames', []),
-                function=mconf.get('function', ''))
-        elif mconf.get('type') == 'quantile':
-            return Quantile(
-                mconf.get('name', ''),
-                mconf.get('probability', ''),
+                function=mconf.get('function', ''),
             )
+        elif mconf.get('type') == 'quantile':
+            return Quantile(mconf.get('name', ''), mconf.get('probability', ''))
         elif mconf.get('type') == 'quantiles':
-            return Quantiles(
-                mconf.get('name', ''),
-                mconf.get('probabilities', ''),
-            )
+            return Quantiles(mconf.get('name', ''), mconf.get('probabilities', ''))
         elif mconf.get('type') == 'fieldAccess':
             return Field(mconf.get('name'))
         elif mconf.get('type') == 'constant':
-            return Const(
-                mconf.get('value'),
-                output_name=mconf.get('name', ''),
-            )
+            return Const(mconf.get('value'), output_name=mconf.get('name', ''))
         elif mconf.get('type') == 'hyperUniqueCardinality':
-            return HyperUniqueCardinality(
-                mconf.get('name'),
-            )
+            return HyperUniqueCardinality(mconf.get('name'))
         elif mconf.get('type') == 'arithmetic':
             return Postaggregator(
-                mconf.get('fn', '/'),
-                mconf.get('fields', []),
-                mconf.get('name', ''))
+                mconf.get('fn', '/'), mconf.get('fields', []), mconf.get('name', '')
+            )
         else:
-            return CustomPostAggregator(
-                mconf.get('name', ''),
-                mconf)
+            return CustomPostAggregator(mconf.get('name', ''), mconf)
 
     @staticmethod
     def find_postaggs_for(postagg_names, metrics_dict):
         """Return a list of metrics that are post aggregations"""
         postagg_metrics = [
-            metrics_dict[name] for name in postagg_names
+            metrics_dict[name]
+            for name in postagg_names
             if metrics_dict[name].metric_type == POST_AGG_TYPE
         ]
         # Remove post aggregations that were found
@@ -839,8 +886,7 @@ class DruidDatasource(Model, BaseDatasource):
         _field = _conf.get('field')
         _fields = _conf.get('fields')
         field_names = []
-        if _type in ['fieldAccess', 'hyperUniqueCardinality',
-                     'quantile', 'quantiles']:
+        if _type in ['fieldAccess', 'hyperUniqueCardinality', 'quantile', 'quantiles']:
             field_names.append(_conf.get('fieldName', ''))
         if _field:
             field_names += DruidDatasource.recursive_get_fields(_field)
@@ -853,18 +899,22 @@ class DruidDatasource(Model, BaseDatasource):
     def resolve_postagg(postagg, post_aggs, agg_names, visited_postaggs, metrics_dict):
         mconf = postagg.json_obj
         required_fields = set(
-            DruidDatasource.recursive_get_fields(mconf) +
-            mconf.get('fieldNames', []))
+            DruidDatasource.recursive_get_fields(mconf) + mconf.get('fieldNames', [])
+        )
         # Check if the fields are already in aggs
         # or is a previous postagg
-        required_fields = set([
-            field for field in required_fields
-            if field not in visited_postaggs and field not in agg_names
-        ])
+        required_fields = set(
+            [
+                field
+                for field in required_fields
+                if field not in visited_postaggs and field not in agg_names
+            ]
+        )
         # First try to find postaggs that match
         if len(required_fields) > 0:
             missing_postaggs = DruidDatasource.find_postaggs_for(
-                required_fields, metrics_dict)
+                required_fields, metrics_dict
+            )
             for missing_metric in required_fields:
                 agg_names.add(missing_metric)
             for missing_postagg in missing_postaggs:
@@ -873,7 +923,12 @@ class DruidDatasource(Model, BaseDatasource):
                 visited_postaggs.add(missing_postagg.metric_name)
             for missing_postagg in missing_postaggs:
                 DruidDatasource.resolve_postagg(
-                    missing_postagg, post_aggs, agg_names, visited_postaggs, metrics_dict)
+                    missing_postagg,
+                    post_aggs,
+                    agg_names,
+                    visited_postaggs,
+                    metrics_dict,
+                )
         post_aggs[postagg.metric_name] = DruidDatasource.get_post_agg(postagg.json_obj)
 
     @staticmethod
@@ -898,21 +953,18 @@ class DruidDatasource(Model, BaseDatasource):
             postagg = metrics_dict[postagg_name]
             visited_postaggs.add(postagg_name)
             DruidDatasource.resolve_postagg(
-                postagg, post_aggs, saved_agg_names, visited_postaggs, metrics_dict)
+                postagg, post_aggs, saved_agg_names, visited_postaggs, metrics_dict
+            )
         aggs = DruidDatasource.get_aggregations(
-            metrics_dict,
-            saved_agg_names,
-            adhoc_agg_configs,
+            metrics_dict, saved_agg_names, adhoc_agg_configs
         )
         return aggs, post_aggs
 
-    def values_for_column(self,
-                          column_name,
-                          limit=10000):
+    def values_for_column(self, column_name, limit=10000):
         """Retrieve some values for the given column"""
         logging.info(
-            'Getting values for columns [{}] limited to [{}]'
-            .format(column_name, limit))
+            'Getting values for column [{}] limited to [{}]'.format(column_name, limit)
+        )
         # TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
         if self.fetch_values_from:
             from_dttm = utils.parse_human_datetime(self.fetch_values_from)
@@ -948,7 +1000,9 @@ class DruidDatasource(Model, BaseDatasource):
                     # Check if this dimension uses an extraction function
                     # If so, create the appropriate pydruid extraction object
                     if isinstance(dim, dict) and 'extractionFn' in dim:
-                        (col, extraction_fn) = DruidDatasource._create_extraction_fn(dim)
+                        (col, extraction_fn) = DruidDatasource._create_extraction_fn(
+                            dim
+                        )
                         dim_val = dim['outputName']
                         f = Filter(
                             dimension=col,
@@ -1011,7 +1065,8 @@ class DruidDatasource(Model, BaseDatasource):
                 invalid_metric_names.append(metric_name)
         if len(invalid_metric_names) > 0:
             raise SupersetException(
-                _('Metric(s) {} must be aggregations.').format(invalid_metric_names))
+                _('Metric(s) {} must be aggregations.').format(invalid_metric_names)
+            )
         for adhoc_metric in adhoc_metrics:
             aggregations[adhoc_metric['label']] = {
                 'fieldName': adhoc_metric['column']['column_name'],
@@ -1023,14 +1078,15 @@ class DruidDatasource(Model, BaseDatasource):
 
     def check_restricted_metrics(self, aggregations):
         rejected_metrics = [
-            m.metric_name for m in self.metrics
-            if m.is_restricted and
-            m.metric_name in aggregations.keys() and
-            not security_manager.has_access('metric_access', m.perm)
+            m.metric_name
+            for m in self.metrics
+            if m.is_restricted
+            and m.metric_name in aggregations.keys()
+            and not security_manager.has_access('metric_access', m.perm)
         ]
         if rejected_metrics:
             raise MetricPermException(
-                'Access to the metrics denied: ' + ', '.join(rejected_metrics),
+                'Access to the metrics denied: ' + ', '.join(rejected_metrics)
             )
 
     def get_dimensions(self, groupby, columns_dict):
@@ -1083,29 +1139,34 @@ class DruidDatasource(Model, BaseDatasource):
         :param dict metric: The metric to sanitize
         """
         if (
-            utils.is_adhoc_metric(metric) and
-            metric['column']['type'].upper() == 'FLOAT'
+            utils.is_adhoc_metric(metric)
+            and metric['column']['type'].upper() == 'FLOAT'
         ):
             metric['column']['type'] = 'DOUBLE'
 
     def run_query(  # noqa / druid
-            self,
-            groupby, metrics,
-            granularity,
-            from_dttm, to_dttm,
-            filter=None,  # noqa
-            is_timeseries=True,
-            timeseries_limit=None,
-            timeseries_limit_metric=None,
-            row_limit=None,
-            inner_from_dttm=None, inner_to_dttm=None,
-            orderby=None,
-            extras=None,  # noqa
-            columns=None, phase=2, client=None,
-            order_desc=True,
-            prequeries=None,
-            is_prequery=False,
-        ):
+        self,
+        groupby,
+        metrics,
+        granularity,
+        from_dttm,
+        to_dttm,
+        filter=None,  # noqa
+        is_timeseries=True,
+        timeseries_limit=None,
+        timeseries_limit_metric=None,
+        row_limit=None,
+        inner_from_dttm=None,
+        inner_to_dttm=None,
+        orderby=None,
+        extras=None,  # noqa
+        columns=None,
+        phase=2,
+        client=None,
+        order_desc=True,
+        prequeries=None,
+        is_prequery=False,
+    ):
         """Runs a query against Druid and returns a dataframe.
         """
         # TODO refactor into using a TBD Query object
@@ -1126,17 +1187,16 @@ class DruidDatasource(Model, BaseDatasource):
         metrics_dict = {m.metric_name: m for m in self.metrics}
         columns_dict = {c.column_name: c for c in self.columns}
 
-        if (
-            self.cluster and
-            LooseVersion(self.cluster.get_druid_version()) < LooseVersion('0.11.0')
-        ):
+        if self.cluster and LooseVersion(
+            self.cluster.get_druid_version()
+        ) < LooseVersion('0.11.0'):
             for metric in metrics:
                 self.sanitize_metric_object(metric)
             self.sanitize_metric_object(timeseries_limit_metric)
 
         aggregations, post_aggs = DruidDatasource.metrics_and_post_aggs(
-            metrics,
-            metrics_dict)
+            metrics, metrics_dict
+        )
 
         self.check_restricted_metrics(aggregations)
 
@@ -1148,9 +1208,7 @@ class DruidDatasource(Model, BaseDatasource):
             dimensions=dimensions,
             aggregations=aggregations,
             granularity=DruidDatasource.granularity(
-                granularity,
-                timezone=timezone,
-                origin=extras.get('druid_time_origin'),
+                granularity, timezone=timezone, origin=extras.get('druid_time_origin')
             ),
             post_aggregations=post_aggs,
             intervals=self.intervals_from_dttms(from_dttm, to_dttm),
@@ -1179,19 +1237,15 @@ class DruidDatasource(Model, BaseDatasource):
             logging.info('Running timeseries query for no groupby values')
             del qry['dimensions']
             client.timeseries(**qry)
-        elif (
-                not having_filters and
-                len(groupby) == 1 and
-                order_desc
-        ):
+        elif not having_filters and len(groupby) == 1 and order_desc:
             dim = list(qry.get('dimensions'))[0]
             logging.info('Running two-phase topn query for dimension [{}]'.format(dim))
             pre_qry = deepcopy(qry)
             if timeseries_limit_metric:
                 order_by = utils.get_metric_name(timeseries_limit_metric)
                 aggs_dict, post_aggs_dict = DruidDatasource.metrics_and_post_aggs(
-                    [timeseries_limit_metric],
-                    metrics_dict)
+                    [timeseries_limit_metric], metrics_dict
+                )
                 if phase == 1:
                     pre_qry['aggregations'].update(aggs_dict)
                     pre_qry['post_aggregations'].update(post_aggs_dict)
@@ -1202,8 +1256,7 @@ class DruidDatasource(Model, BaseDatasource):
                 order_by = list(qry['aggregations'].keys())[0]
             # Limit on the number of timeseries, doing a two-phases query
             pre_qry['granularity'] = 'all'
-            pre_qry['threshold'] = min(row_limit,
-                                       timeseries_limit or row_limit)
+            pre_qry['threshold'] = min(row_limit, timeseries_limit or row_limit)
             pre_qry['metric'] = order_by
             pre_qry['dimension'] = self._dimensions_to_values(qry.get('dimensions'))[0]
             del pre_qry['dimensions']
@@ -1213,17 +1266,16 @@ class DruidDatasource(Model, BaseDatasource):
             if phase == 2:
                 query_str += '// Two phase query\n// Phase 1\n'
             query_str += json.dumps(
-                client.query_builder.last_query.query_dict, indent=2)
+                client.query_builder.last_query.query_dict, indent=2
+            )
             query_str += '\n'
             if phase == 1:
                 return query_str
-            query_str += (
-                "// Phase 2 (built based on phase one's results)\n")
+            query_str += "// Phase 2 (built based on phase one's results)\n"
             df = client.export_pandas()
             qry['filter'] = self._add_filter_from_pre_query_data(
-                df,
-                [pre_qry['dimension']],
-                filters)
+                df, [pre_qry['dimension']], filters
+            )
             qry['threshold'] = timeseries_limit or 1000
             if row_limit and granularity == 'all':
                 qry['threshold'] = row_limit
@@ -1245,7 +1297,7 @@ class DruidDatasource(Model, BaseDatasource):
                 # Can't use set on an array with dicts
                 # Use set with non-dict items only
                 non_dict_dims = list(
-                    set([x for x in pre_qry_dims if not isinstance(x, dict)]),
+                    set([x for x in pre_qry_dims if not isinstance(x, dict)])
                 )
                 dict_dims = [x for x in pre_qry_dims if isinstance(x, dict)]
                 pre_qry['dimensions'] = non_dict_dims + dict_dims
@@ -1259,8 +1311,8 @@ class DruidDatasource(Model, BaseDatasource):
                 if timeseries_limit_metric:
                     order_by = utils.get_metric_name(timeseries_limit_metric)
                     aggs_dict, post_aggs_dict = DruidDatasource.metrics_and_post_aggs(
-                        [timeseries_limit_metric],
-                        metrics_dict)
+                        [timeseries_limit_metric], metrics_dict
+                    )
                     if phase == 1:
                         pre_qry['aggregations'].update(aggs_dict)
                         pre_qry['post_aggregations'].update(post_aggs_dict)
@@ -1274,27 +1326,23 @@ class DruidDatasource(Model, BaseDatasource):
                     'type': 'default',
                     'limit': min(timeseries_limit, row_limit),
                     'intervals': self.intervals_from_dttms(
-                        inner_from_dttm, inner_to_dttm),
-                    'columns': [{
-                        'dimension': order_by,
-                        'direction': order_direction,
-                    }],
+                        inner_from_dttm, inner_to_dttm
+                    ),
+                    'columns': [{'dimension': order_by, 'direction': order_direction}],
                 }
                 client.groupby(**pre_qry)
                 logging.info('Phase 1 Complete')
                 query_str += '// Two phase query\n// Phase 1\n'
                 query_str += json.dumps(
-                    client.query_builder.last_query.query_dict, indent=2)
+                    client.query_builder.last_query.query_dict, indent=2
+                )
                 query_str += '\n'
                 if phase == 1:
                     return query_str
-                query_str += (
-                    "// Phase 2 (built based on phase one's results)\n")
+                query_str += "// Phase 2 (built based on phase one's results)\n"
                 df = client.export_pandas()
                 qry['filter'] = self._add_filter_from_pre_query_data(
-                    df,
-                    pre_qry['dimensions'],
-                    filters,
+                    df, pre_qry['dimensions'], filters
                 )
                 qry['limit_spec'] = None
             if row_limit:
@@ -1302,19 +1350,20 @@ class DruidDatasource(Model, BaseDatasource):
                 qry['limit_spec'] = {
                     'type': 'default',
                     'limit': row_limit,
-                    'columns': [{
-                        'dimension': (
-                            utils.get_metric_name(
-                                metrics[0],
-                            ) if metrics else dimension_values[0]
-                        ),
-                        'direction': order_direction,
-                    }],
+                    'columns': [
+                        {
+                            'dimension': (
+                                utils.get_metric_name(metrics[0])
+                                if metrics
+                                else dimension_values[0]
+                            ),
+                            'direction': order_direction,
+                        }
+                    ],
                 }
             client.groupby(**qry)
             logging.info('Query Complete')
-        query_str += json.dumps(
-            client.query_builder.last_query.query_dict, indent=2)
+        query_str += json.dumps(client.query_builder.last_query.query_dict, indent=2)
         return query_str
 
     @staticmethod
@@ -1335,27 +1384,25 @@ class DruidDatasource(Model, BaseDatasource):
     def query(self, query_obj):
         qry_start_dttm = datetime.now()
         client = self.cluster.get_pydruid_client()
-        query_str = self.get_query_str(
-            client=client, query_obj=query_obj, phase=2)
+        query_str = self.get_query_str(client=client, query_obj=query_obj, phase=2)
         df = client.export_pandas()
 
         if df is None or df.size == 0:
             return QueryResult(
                 df=pandas.DataFrame([]),
                 query=query_str,
-                duration=datetime.now() - qry_start_dttm)
+                duration=datetime.now() - qry_start_dttm,
+            )
 
         df = self.homogenize_types(df, query_obj.get('groupby', []))
         df.columns = [
-            DTTM_ALIAS if c in ('timestamp', '__time') else c
-            for c in df.columns
+            DTTM_ALIAS if c in ('timestamp', '__time') else c for c in df.columns
         ]
 
-        is_timeseries = query_obj['is_timeseries'] \
-            if 'is_timeseries' in query_obj else True
-        if (
-                not is_timeseries and
-                DTTM_ALIAS in df.columns):
+        is_timeseries = (
+            query_obj['is_timeseries'] if 'is_timeseries' in query_obj else True
+        )
+        if not is_timeseries and DTTM_ALIAS in df.columns:
             del df[DTTM_ALIAS]
 
         # Reordering columns
@@ -1373,16 +1420,15 @@ class DruidDatasource(Model, BaseDatasource):
         time_offset = DruidDatasource.time_offset(query_obj['granularity'])
 
         def increment_timestamp(ts):
-            dt = utils.parse_human_datetime(ts).replace(
-                tzinfo=DRUID_TZ)
+            dt = utils.parse_human_datetime(ts).replace(tzinfo=DRUID_TZ)
             return dt + timedelta(milliseconds=time_offset)
+
         if DTTM_ALIAS in df.columns and time_offset:
             df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)
 
         return QueryResult(
-            df=df,
-            query=query_str,
-            duration=datetime.now() - qry_start_dttm)
+            df=df, query=query_str, duration=datetime.now() - qry_start_dttm
+        )
 
     @staticmethod
     def _create_extraction_fn(dim_spec):
@@ -1416,9 +1462,10 @@ class DruidDatasource(Model, BaseDatasource):
             op = flt.get('op')
             eq = flt.get('val')
             if (
-                    not col or
-                    not op or
-                    (eq is None and op not in ('IS NULL', 'IS NOT NULL'))):
+                not col
+                or not op
+                or (eq is None and op not in ('IS NULL', 'IS NOT NULL'))
+            ):
                 continue
 
             # Check if this dimension uses an extraction function
@@ -1433,15 +1480,21 @@ class DruidDatasource(Model, BaseDatasource):
             is_numeric_col = col in num_cols
             is_list_target = op in ('in', 'not in')
             eq = cls.filter_values_handler(
-                eq, is_list_target=is_list_target,
-                target_column_is_numeric=is_numeric_col)
+                eq,
+                is_list_target=is_list_target,
+                target_column_is_numeric=is_numeric_col,
+            )
 
             # For these two ops, could have used Dimension,
             # but it doesn't support extraction functions
             if op == '==':
-                cond = Filter(dimension=col, value=eq, extraction_function=extraction_fn)
+                cond = Filter(
+                    dimension=col, value=eq, extraction_function=extraction_fn
+                )
             elif op == '!=':
-                cond = ~Filter(dimension=col, value=eq, extraction_function=extraction_fn)
+                cond = ~Filter(
+                    dimension=col, value=eq, extraction_function=extraction_fn
+                )
             elif op in ('in', 'not in'):
                 fields = []
                 # ignore the filter if it has no value
@@ -1524,10 +1577,7 @@ class DruidDatasource(Model, BaseDatasource):
                 cond = Dimension(col) != None  # NOQA
 
             if filters:
-                filters = Filter(type='and', fields=[
-                    cond,
-                    filters,
-                ])
+                filters = Filter(type='and', fields=[cond, filters])
             else:
                 filters = cond
 
@@ -1549,11 +1599,7 @@ class DruidDatasource(Model, BaseDatasource):
 
     def get_having_filters(self, raw_filters):
         filters = None
-        reversed_op_map = {
-            '!=': '==',
-            '>=': '<',
-            '<=': '>',
-        }
+        reversed_op_map = {'!=': '==', '>=': '<', '<=': '>'}
 
         for flt in raw_filters:
             if not all(f in flt for f in ['col', 'op', 'val']):
@@ -1574,8 +1620,7 @@ class DruidDatasource(Model, BaseDatasource):
         return filters
 
     @classmethod
-    def query_datasources_by_name(
-            cls, session, database, datasource_name, schema=None):
+    def query_datasources_by_name(cls, session, database, datasource_name, schema=None):
         return (
             session.query(cls)
             .filter_by(cluster_name=database.id)
@@ -1586,10 +1631,7 @@ class DruidDatasource(Model, BaseDatasource):
     def external_metadata(self):
         self.merge_flag = True
         return [
-            {
-                'name': k,
-                'type': v.get('type'),
-            }
+            {'name': k, 'type': v.get('type')}
             for k, v in self.latest_metadata().items()
         ]
 
diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py
index 18c1aef..57e5e44 100644
--- a/superset/connectors/druid/views.py
+++ b/superset/connectors/druid/views.py
@@ -15,9 +15,14 @@ from superset.connectors.base.views import DatasourceModelView
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.utils import core as utils
 from superset.views.base import (
-    BaseSupersetView, DatasourceFilter, DeleteMixin,
-    get_datasource_exist_error_msg, ListWidgetWithCheckboxes, SupersetModelView,
-    validate_json, YamlExportMixin,
+    BaseSupersetView,
+    DatasourceFilter,
+    DeleteMixin,
+    get_datasource_exist_error_msg,
+    ListWidgetWithCheckboxes,
+    SupersetModelView,
+    validate_json,
+    YamlExportMixin,
 )
 from . import models
 
@@ -33,12 +38,30 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     list_widget = ListWidgetWithCheckboxes
 
     edit_columns = [
-        'column_name', 'verbose_name', 'description', 'dimension_spec_json', 'datasource',
-        'groupby', 'filterable', 'count_distinct', 'sum', 'min', 'max']
+        'column_name',
+        'verbose_name',
+        'description',
+        'dimension_spec_json',
+        'datasource',
+        'groupby',
+        'filterable',
+        'count_distinct',
+        'sum',
+        'min',
+        'max',
+    ]
     add_columns = edit_columns
     list_columns = [
-        'column_name', 'verbose_name', 'type', 'groupby', 'filterable', 'count_distinct',
-        'sum', 'min', 'max']
+        'column_name',
+        'verbose_name',
+        'type',
+        'groupby',
+        'filterable',
+        'count_distinct',
+        'sum',
+        'min',
+        'max',
+    ]
     can_delete = False
     page_size = 500
     label_columns = {
@@ -57,7 +80,8 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     description_columns = {
         'filterable': _(
             'Whether this column is exposed in the `Filters` section '
-            'of the explore view.'),
+            'of the explore view.'
+        ),
         'dimension_spec_json': utils.markdown(
             'this field can be used to specify '
             'a `dimensionSpec` as documented [here]'
@@ -65,7 +89,8 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
             'Make sure to input valid JSON and that the '
             '`outputName` matches the `column_name` defined '
             'above.',
-            True),
+            True,
+        ),
     }
 
     def pre_update(self, col):
@@ -85,8 +110,10 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
             # `outputName` should be the same as the `column_name`
             if dimension_spec['outputName'] != col.column_name:
                 raise ValueError(
-                    '`outputName` [{}] unequal to `column_name` [{}]'
-                    .format(dimension_spec['outputName'], col.column_name))
+                    '`outputName` [{}] unequal to `column_name` [{}]'.format(
+                        dimension_spec['outputName'], col.column_name
+                    )
+                )
 
     def post_update(self, col):
         col.refresh_metrics()
@@ -108,23 +135,32 @@ class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
 
     list_columns = ['metric_name', 'verbose_name', 'metric_type']
     edit_columns = [
-        'metric_name', 'description', 'verbose_name', 'metric_type', 'json',
-        'datasource', 'd3format', 'is_restricted', 'warning_text']
+        'metric_name',
+        'description',
+        'verbose_name',
+        'metric_type',
+        'json',
+        'datasource',
+        'd3format',
+        'is_restricted',
+        'warning_text',
+    ]
     add_columns = edit_columns
     page_size = 500
-    validators_columns = {
-        'json': [validate_json],
-    }
+    validators_columns = {'json': [validate_json]}
     description_columns = {
         'metric_type': utils.markdown(
             'use `postagg` as the metric type if you are defining a '
             '[Druid Post Aggregation]'
             '(http://druid.io/docs/latest/querying/post-aggregations.html)',
-            True),
-        'is_restricted': _('Whether access to this metric is restricted '
-                           'to certain roles. Only roles with the permission '
-                           "'metric access on XXX (the name of this metric)' "
-                           'are allowed to access this metric'),
+            True,
+        ),
+        'is_restricted': _(
+            'Whether access to this metric is restricted '
+            'to certain roles. Only roles with the permission '
+            "'metric access on XXX (the name of this metric)' "
+            'are allowed to access this metric'
+        ),
     }
     label_columns = {
         'metric_name': _('Metric'),
@@ -158,8 +194,12 @@ class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin):  #
     edit_title = _('Edit Druid Cluster')
 
     add_columns = [
-        'verbose_name', 'broker_host', 'broker_port',
-        'broker_endpoint', 'cache_timeout', 'cluster_name',
+        'verbose_name',
+        'broker_host',
+        'broker_port',
+        'broker_endpoint',
+        'cache_timeout',
+        'cluster_name',
     ]
     edit_columns = add_columns
     list_columns = ['cluster_name', 'metadata_last_refreshed']
@@ -177,7 +217,8 @@ class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin):  #
         'cache_timeout': _(
             'Duration (in seconds) of the caching timeout for this cluster. '
             'A timeout of 0 indicates that the cache never expires. '
-            'Note this defaults to the global timeout if undefined.'),
+            'Note this defaults to the global timeout if undefined.'
+        )
     }
 
     def pre_add(self, cluster):
@@ -201,7 +242,9 @@ appbuilder.add_view(
 )
 
 
-class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
+class DruidDatasourceModelView(
+    DatasourceModelView, DeleteMixin, YamlExportMixin
+):  # noqa
     datamodel = SQLAInterface(models.DruidDatasource)
 
     list_title = _('List Druid Datasource')
@@ -209,18 +252,22 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin
     add_title = _('Add Druid Datasource')
     edit_title = _('Edit Druid Datasource')
 
-    list_columns = [
-        'datasource_link', 'cluster', 'changed_by_', 'modified']
+    list_columns = ['datasource_link', 'cluster', 'changed_by_', 'modified']
     order_columns = ['datasource_link', 'modified']
     related_views = [DruidColumnInlineView, DruidMetricInlineView]
     edit_columns = [
-        'datasource_name', 'cluster', 'description', 'owner',
+        'datasource_name',
+        'cluster',
+        'description',
+        'owner',
         'is_hidden',
-        'filter_select_enabled', 'fetch_values_from',
-        'default_endpoint', 'offset', 'cache_timeout']
-    search_columns = (
-        'datasource_name', 'cluster', 'description', 'owner',
-    )
+        'filter_select_enabled',
+        'fetch_values_from',
+        'default_endpoint',
+        'offset',
+        'cache_timeout',
+    ]
+    search_columns = ('datasource_name', 'cluster', 'description', 'owner')
     add_columns = edit_columns
     show_columns = add_columns + ['perm', 'slices']
     page_size = 500
@@ -233,29 +280,35 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin
             'Also note that charts need to point to a datasource, so '
             'this form will fail at saving if removing charts from a '
             'datasource. If you want to change the datasource for a chart, '
-            "overwrite the chart from the 'explore view'"),
+            "overwrite the chart from the 'explore view'"
+        ),
         'offset': _('Timezone offset (in hours) for this datasource'),
         'description': Markup(
             'Supports <a href="'
-            'https://daringfireball.net/projects/markdown/">markdown</a>'),
+            'https://daringfireball.net/projects/markdown/">markdown</a>'
+        ),
         'fetch_values_from': _(
             'Time expression to use as a predicate when retrieving '
             'distinct values to populate the filter component. '
             'Only applies when `Enable Filter Select` is on. If '
             'you enter `7 days ago`, the distinct list of values in '
             'the filter will be populated based on the distinct value over '
-            'the past week'),
+            'the past week'
+        ),
         'filter_select_enabled': _(
             "Whether to populate the filter's dropdown in the explore "
             "view's filter section with a list of distinct values fetched "
-            'from the backend on the fly'),
+            'from the backend on the fly'
+        ),
         'default_endpoint': _(
             'Redirects to this endpoint when clicking on the datasource '
-            'from the datasource list'),
+            'from the datasource list'
+        ),
         'cache_timeout': _(
             'Duration (in seconds) of the caching timeout for this datasource. '
             'A timeout of 0 indicates that the cache never expires. '
-            'Note this defaults to the cluster timeout if undefined.'),
+            'Note this defaults to the cluster timeout if undefined.'
+        ),
     }
     base_filters = [['id', DatasourceFilter, lambda: []]]
     label_columns = {
@@ -277,16 +330,12 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin
 
     def pre_add(self, datasource):
         with db.session.no_autoflush:
-            query = (
-                db.session.query(models.DruidDatasource)
-                .filter(models.DruidDatasource.datasource_name ==
-                        datasource.datasource_name,
-                        models.DruidDatasource.cluster_name ==
-                        datasource.cluster.id)
+            query = db.session.query(models.DruidDatasource).filter(
+                models.DruidDatasource.datasource_name == datasource.datasource_name,
+                models.DruidDatasource.cluster_name == datasource.cluster.id,
             )
             if db.session.query(query.exists()).scalar():
-                raise Exception(get_datasource_exist_error_msg(
-                    datasource.full_name))
+                raise Exception(get_datasource_exist_error_msg(datasource.full_name))
 
     def post_add(self, datasource):
         datasource.refresh_metrics()
@@ -307,7 +356,8 @@ appbuilder.add_view(
     label=__('Druid Datasources'),
     category='Sources',
     category_label=__('Sources'),
-    icon='fa-cube')
+    icon='fa-cube',
+)
 
 
 class Druid(BaseSupersetView):
@@ -326,15 +376,17 @@ class Druid(BaseSupersetView):
             except Exception as e:
                 flash(
                     "Error while processing cluster '{}'\n{}".format(
-                        cluster_name, utils.error_msg_from_exception(e)),
-                    'danger')
+                        cluster_name, utils.error_msg_from_exception(e)
+                    ),
+                    'danger',
+                )
                 logging.exception(e)
                 return redirect('/druidclustermodelview/list/')
             cluster.metadata_last_refreshed = datetime.now()
             flash(
-                _('Refreshed metadata from cluster [{}]').format(
-                    cluster.cluster_name),
-                'info')
+                _('Refreshed metadata from cluster [{}]').format(cluster.cluster_name),
+                'info',
+            )
         session.commit()
         return redirect('/druiddatasourcemodelview/list/')
 
@@ -357,7 +409,8 @@ appbuilder.add_link(
     category='Sources',
     category_label=__('Sources'),
     category_icon='fa-database',
-    icon='fa-refresh')
+    icon='fa-refresh',
+)
 appbuilder.add_link(
     'Refresh Druid Metadata',
     label=__('Refresh Druid Metadata'),
@@ -365,7 +418,8 @@ appbuilder.add_link(
     category='Sources',
     category_label=__('Sources'),
     category_icon='fa-database',
-    icon='fa-cog')
+    icon='fa-cog',
+)
 
 
 appbuilder.add_separator('Sources')
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index cf22add..d189a40 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -8,8 +8,18 @@ from flask_babel import lazy_gettext as _
 import pandas as pd
 import sqlalchemy as sa
 from sqlalchemy import (
-    and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, or_,
-    select, String, Text,
+    and_,
+    asc,
+    Boolean,
+    Column,
+    DateTime,
+    desc,
+    ForeignKey,
+    Integer,
+    or_,
+    select,
+    String,
+    Text,
 )
 from sqlalchemy.exc import CompileError
 from sqlalchemy.orm import backref, relationship
@@ -51,14 +61,10 @@ class AnnotationDatasource(BaseDatasource):
         except Exception as e:
             status = utils.QueryStatus.FAILED
             logging.exception(e)
-            error_message = (
-                utils.error_msg_from_exception(e))
+            error_message = utils.error_msg_from_exception(e)
         return QueryResult(
-            status=status,
-            df=df,
-            duration=0,
-            query='',
-            error_message=error_message)
+            status=status, df=df, duration=0, query='', error_message=error_message
+        )
 
     def get_query_str(self, query_obj):
         raise NotImplementedError()
@@ -77,26 +83,41 @@ class TableColumn(Model, BaseColumn):
     table = relationship(
         'SqlaTable',
         backref=backref('columns', cascade='all, delete-orphan'),
-        foreign_keys=[table_id])
+        foreign_keys=[table_id],
+    )
     is_dttm = Column(Boolean, default=False)
     expression = Column(Text, default='')
     python_date_format = Column(String(255))
     database_expression = Column(String(255))
 
     export_fields = (
-        'table_id', 'column_name', 'verbose_name', 'is_dttm', 'is_active',
-        'type', 'groupby', 'count_distinct', 'sum', 'avg', 'max', 'min',
-        'filterable', 'expression', 'description', 'python_date_format',
+        'table_id',
+        'column_name',
+        'verbose_name',
+        'is_dttm',
+        'is_active',
+        'type',
+        'groupby',
+        'count_distinct',
+        'sum',
+        'avg',
+        'max',
+        'min',
+        'filterable',
+        'expression',
+        'description',
+        'python_date_format',
         'database_expression',
     )
 
-    update_from_object_fields = [
-        s for s in export_fields if s not in ('table_id',)]
+    update_from_object_fields = [s for s in export_fields if s not in ('table_id',)]
     export_parent = 'table'
 
     def get_sqla_col(self, label=None):
         db_engine_spec = self.table.database.db_engine_spec
-        label = db_engine_spec.make_label_compatible(label if label else self.column_name)
+        label = db_engine_spec.make_label_compatible(
+            label if label else self.column_name
+        )
         if not self.expression:
             col = column(self.column_name).label(label)
         else:
@@ -140,9 +161,15 @@ class TableColumn(Model, BaseColumn):
     @classmethod
     def import_obj(cls, i_column):
         def lookup_obj(lookup_column):
-            return db.session.query(TableColumn).filter(
-                TableColumn.table_id == lookup_column.table_id,
-                TableColumn.column_name == lookup_column.column_name).first()
+            return (
+                db.session.query(TableColumn)
+                .filter(
+                    TableColumn.table_id == lookup_column.table_id,
+                    TableColumn.column_name == lookup_column.column_name,
+                )
+                .first()
+            )
+
         return import_datasource.import_simple_obj(db.session, i_column, lookup_obj)
 
     def dttm_sql_literal(self, dttm):
@@ -164,8 +191,7 @@ class TableColumn(Model, BaseColumn):
                 return str((dttm - datetime(1970, 1, 1)).total_seconds() * 1000.0)
             return "'{}'".format(dttm.strftime(tf))
         else:
-            s = self.table.database.db_engine_spec.convert_dttm(
-                self.type or '', dttm)
+            s = self.table.database.db_engine_spec.convert_dttm(self.type or '', dttm)
             return s or "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S.%f'))
 
     def get_metrics(self):
@@ -174,35 +200,45 @@ class TableColumn(Model, BaseColumn):
         M = SqlMetric  # noqa
         quoted = self.column_name
         if self.sum:
-            metrics.append(M(
-                metric_name='sum__' + self.column_name,
-                metric_type='sum',
-                expression='SUM({})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name='sum__' + self.column_name,
+                    metric_type='sum',
+                    expression='SUM({})'.format(quoted),
+                )
+            )
         if self.avg:
-            metrics.append(M(
-                metric_name='avg__' + self.column_name,
-                metric_type='avg',
-                expression='AVG({})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name='avg__' + self.column_name,
+                    metric_type='avg',
+                    expression='AVG({})'.format(quoted),
+                )
+            )
         if self.max:
-            metrics.append(M(
-                metric_name='max__' + self.column_name,
-                metric_type='max',
-                expression='MAX({})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name='max__' + self.column_name,
+                    metric_type='max',
+                    expression='MAX({})'.format(quoted),
+                )
+            )
         if self.min:
-            metrics.append(M(
-                metric_name='min__' + self.column_name,
-                metric_type='min',
-                expression='MIN({})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name='min__' + self.column_name,
+                    metric_type='min',
+                    expression='MIN({})'.format(quoted),
+                )
+            )
         if self.count_distinct:
-            metrics.append(M(
-                metric_name='count_distinct__' + self.column_name,
-                metric_type='count_distinct',
-                expression='COUNT(DISTINCT {})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name='count_distinct__' + self.column_name,
+                    metric_type='count_distinct',
+                    expression='COUNT(DISTINCT {})'.format(quoted),
+                )
+            )
         return {m.metric_name: m for m in metrics}
 
 
@@ -216,27 +252,42 @@ class SqlMetric(Model, BaseMetric):
     table = relationship(
         'SqlaTable',
         backref=backref('metrics', cascade='all, delete-orphan'),
-        foreign_keys=[table_id])
+        foreign_keys=[table_id],
+    )
     expression = Column(Text)
 
     export_fields = (
-        'metric_name', 'verbose_name', 'metric_type', 'table_id', 'expression',
-        'description', 'is_restricted', 'd3format', 'warning_text')
-    update_from_object_fields = list([
-        s for s in export_fields if s not in ('table_id', )])
+        'metric_name',
+        'verbose_name',
+        'metric_type',
+        'table_id',
+        'expression',
+        'description',
+        'is_restricted',
+        'd3format',
+        'warning_text',
+    )
+    update_from_object_fields = list(
+        [s for s in export_fields if s not in ('table_id',)]
+    )
     export_parent = 'table'
 
     def get_sqla_col(self, label=None):
         db_engine_spec = self.table.database.db_engine_spec
-        label = db_engine_spec.make_label_compatible(label if label else self.metric_name)
+        label = db_engine_spec.make_label_compatible(
+            label if label else self.metric_name
+        )
         return literal_column(self.expression).label(label)
 
     @property
     def perm(self):
         return (
-            '{parent_name}.[{obj.metric_name}](id:{obj.id})'
-        ).format(obj=self,
-                 parent_name=self.table.full_name) if self.table else None
+            ('{parent_name}.[{obj.metric_name}](id:{obj.id})').format(
+                obj=self, parent_name=self.table.full_name
+            )
+            if self.table
+            else None
+        )
 
     def get_perm(self):
         return self.perm
@@ -244,9 +295,15 @@ class SqlMetric(Model, BaseMetric):
     @classmethod
     def import_obj(cls, i_metric):
         def lookup_obj(lookup_metric):
-            return db.session.query(SqlMetric).filter(
-                SqlMetric.table_id == lookup_metric.table_id,
-                SqlMetric.metric_name == lookup_metric.metric_name).first()
+            return (
+                db.session.query(SqlMetric)
+                .filter(
+                    SqlMetric.table_id == lookup_metric.table_id,
+                    SqlMetric.metric_name == lookup_metric.metric_name,
+                )
+                .first()
+            )
+
         return import_datasource.import_simple_obj(db.session, i_metric, lookup_obj)
 
 
@@ -268,13 +325,13 @@ class SqlaTable(Model, BaseDatasource):
     fetch_values_predicate = Column(String(1000))
     user_id = Column(Integer, ForeignKey('ab_user.id'))
     owner = relationship(
-        security_manager.user_model,
-        backref='tables',
-        foreign_keys=[user_id])
+        security_manager.user_model, backref='tables', foreign_keys=[user_id]
+    )
     database = relationship(
         'Database',
         backref=backref('tables', cascade='all, delete-orphan'),
-        foreign_keys=[database_id])
+        foreign_keys=[database_id],
+    )
     schema = Column(String(255))
     sql = Column(Text)
     is_sqllab_view = Column(Boolean, default=False)
@@ -283,13 +340,23 @@ class SqlaTable(Model, BaseDatasource):
     baselink = 'tablemodelview'
 
     export_fields = (
-        'table_name', 'main_dttm_col', 'description', 'default_endpoint',
-        'database_id', 'offset', 'cache_timeout', 'schema',
-        'sql', 'params', 'template_params', 'filter_select_enabled',
+        'table_name',
+        'main_dttm_col',
+        'description',
+        'default_endpoint',
+        'database_id',
+        'offset',
+        'cache_timeout',
+        'schema',
+        'sql',
+        'params',
+        'template_params',
+        'filter_select_enabled',
         'fetch_values_predicate',
     )
     update_from_object_fields = [
-        f for f in export_fields if f not in ('table_name', 'database_id')]
+        f for f in export_fields if f not in ('table_name', 'database_id')
+    ]
     export_parent = 'database'
     export_children = ['metrics', 'columns']
 
@@ -333,9 +400,7 @@ class SqlaTable(Model, BaseDatasource):
         return security_manager.get_schema_perm(self.database, self.schema)
 
     def get_perm(self):
-        return (
-            '[{obj.database}].[{obj.table_name}]'
-            '(id:{obj.id})').format(obj=self)
+        return ('[{obj.database}].[{obj.table_name}]' '(id:{obj.id})').format(obj=self)
 
     @property
     def name(self):
@@ -346,7 +411,8 @@ class SqlaTable(Model, BaseDatasource):
     @property
     def full_name(self):
         return utils.get_datasource_full_name(
-            self.database, self.table_name, schema=self.schema)
+            self.database, self.table_name, schema=self.schema
+        )
 
     @property
     def dttm_cols(self):
@@ -372,9 +438,8 @@ class SqlaTable(Model, BaseDatasource):
         df.columns = ['field', 'type']
         return df.to_html(
             index=False,
-            classes=(
-                'dataframe table table-striped table-bordered '
-                'table-condensed'))
+            classes=('dataframe table table-striped table-bordered ' 'table-condensed'),
+        )
 
     @property
     def sql_url(self):
@@ -401,7 +466,8 @@ class SqlaTable(Model, BaseDatasource):
         # show_cols and latest_partition set to false to avoid
         # the expensive cost of inspecting the DB
         return self.database.select_star(
-            self.name, show_cols=False, latest_partition=False)
+            self.name, show_cols=False, latest_partition=False
+        )
 
     def get_col(self, col_name):
         columns = self.columns
@@ -443,9 +509,7 @@ class SqlaTable(Model, BaseDatasource):
             qry = qry.where(tp.process_template(self.fetch_values_predicate))
 
         engine = self.database.get_sqla_engine()
-        sql = '{}'.format(
-            qry.compile(engine, compile_kwargs={'literal_binds': True}),
-        )
+        sql = '{}'.format(qry.compile(engine, compile_kwargs={'literal_binds': True}))
         sql = self.mutate_query_from_config(sql)
 
         df = pd.read_sql_query(sql=sql, con=engine)
@@ -462,8 +526,7 @@ class SqlaTable(Model, BaseDatasource):
         return sql
 
     def get_template_processor(self, **kwargs):
-        return get_template_processor(
-            table=self, database=self.database, **kwargs)
+        return get_template_processor(table=self, database=self.database, **kwargs)
 
     def get_query_str(self, query_obj):
         qry = self.get_sqla_query(**query_obj)
@@ -523,24 +586,26 @@ class SqlaTable(Model, BaseDatasource):
             return None
 
     def get_sqla_query(  # sqla
-            self,
-            groupby, metrics,
-            granularity,
-            from_dttm, to_dttm,
-            filter=None,  # noqa
-            is_timeseries=True,
-            timeseries_limit=15,
-            timeseries_limit_metric=None,
-            row_limit=None,
-            inner_from_dttm=None,
-            inner_to_dttm=None,
-            orderby=None,
-            extras=None,
-            columns=None,
-            order_desc=True,
-            prequeries=None,
-            is_prequery=False,
-        ):
+        self,
+        groupby,
+        metrics,
+        granularity,
+        from_dttm,
+        to_dttm,
+        filter=None,  # noqa
+        is_timeseries=True,
+        timeseries_limit=15,
+        timeseries_limit_metric=None,
+        row_limit=None,
+        inner_from_dttm=None,
+        inner_to_dttm=None,
+        orderby=None,
+        extras=None,
+        columns=None,
+        order_desc=True,
+        prequeries=None,
+        is_prequery=False,
+    ):
         """Querying any sqla table from this common interface"""
         template_kwargs = {
             'from_dttm': from_dttm,
@@ -568,9 +633,12 @@ class SqlaTable(Model, BaseDatasource):
         metrics_dict = {m.metric_name: m for m in self.metrics}
 
         if not granularity and is_timeseries:
-            raise Exception(_(
-                'Datetime column not provided as part of table configuration '
-                'and is required by this type of chart'))
+            raise Exception(
+                _(
+                    'Datetime column not provided as part of table configuration '
+                    'and is required by this type of chart'
+                )
+            )
         if not groupby and not metrics and not columns:
             raise Exception(_('Empty query?'))
         metrics_exprs = []
@@ -585,7 +653,8 @@ class SqlaTable(Model, BaseDatasource):
             main_metric_expr = metrics_exprs[0]
         else:
             main_metric_expr = literal_column('COUNT(*)').label(
-                db_engine_spec.make_label_compatible('count'))
+                db_engine_spec.make_label_compatible('count')
+            )
 
         select_exprs = []
         groupby_exprs = []
@@ -619,11 +688,14 @@ class SqlaTable(Model, BaseDatasource):
                 groupby_exprs += [timestamp]
 
             # Use main dttm column to support index with secondary dttm columns
-            if db_engine_spec.time_secondary_columns and \
-                    self.main_dttm_col in self.dttm_cols and \
-                    self.main_dttm_col != dttm_col.column_name:
-                time_filters.append(cols[self.main_dttm_col].
-                                    get_time_filter(from_dttm, to_dttm))
+            if (
+                db_engine_spec.time_secondary_columns
+                and self.main_dttm_col in self.dttm_cols
+                and self.main_dttm_col != dttm_col.column_name
+            ):
+                time_filters.append(
+                    cols[self.main_dttm_col].get_time_filter(from_dttm, to_dttm)
+                )
             time_filters.append(dttm_col.get_time_filter(from_dttm, to_dttm))
 
         select_exprs += metrics_exprs
@@ -647,7 +719,8 @@ class SqlaTable(Model, BaseDatasource):
                 eq = self.filter_values_handler(
                     flt.get('val'),
                     target_column_is_numeric=col_obj.is_num,
-                    is_list_target=is_list_target)
+                    is_list_target=is_list_target,
+                )
                 if op in ('in', 'not in'):
                     cond = col_obj.get_sqla_col().in_(eq)
                     if '<NULL>' in eq:
@@ -675,8 +748,7 @@ class SqlaTable(Model, BaseDatasource):
                     elif op == 'IS NULL':
                         where_clause_and.append(col_obj.get_sqla_col() == None)  # noqa
                     elif op == 'IS NOT NULL':
-                        where_clause_and.append(
-                            col_obj.get_sqla_col() != None)  # noqa
+                        where_clause_and.append(col_obj.get_sqla_col() != None)  # noqa
         if extras:
             where = extras.get('where')
             if where:
@@ -704,8 +776,7 @@ class SqlaTable(Model, BaseDatasource):
         if row_limit:
             qry = qry.limit(row_limit)
 
-        if is_timeseries and \
-                timeseries_limit and groupby and not time_groupby_inline:
+        if is_timeseries and timeseries_limit and groupby and not time_groupby_inline:
             if self.database.db_engine_spec.inner_joins:
                 # some sql dialects require for order by expressions
                 # to also be in the select clause -- others, e.g. vertica,
@@ -715,8 +786,7 @@ class SqlaTable(Model, BaseDatasource):
                 subq = select(inner_select_exprs)
                 subq = subq.select_from(tbl)
                 inner_time_filter = dttm_col.get_time_filter(
-                    inner_from_dttm or from_dttm,
-                    inner_to_dttm or to_dttm,
+                    inner_from_dttm or from_dttm, inner_to_dttm or to_dttm
                 )
                 subq = subq.where(and_(*(where_clause_and + [inner_time_filter])))
                 subq = subq.group_by(*inner_groupby_exprs)
@@ -727,7 +797,7 @@ class SqlaTable(Model, BaseDatasource):
                         ob = self.adhoc_metric_to_sqla(timeseries_limit_metric, cols)
                     elif timeseries_limit_metric in metrics_dict:
                         timeseries_limit_metric = metrics_dict.get(
-                            timeseries_limit_metric,
+                            timeseries_limit_metric
                         )
                         ob = timeseries_limit_metric.get_sqla_col()
                     else:
@@ -738,8 +808,7 @@ class SqlaTable(Model, BaseDatasource):
 
                 on_clause = []
                 for i, gb in enumerate(groupby):
-                    on_clause.append(
-                        groupby_exprs[i] == column(gb + '__'))
+                    on_clause.append(groupby_exprs[i] == column(gb + '__'))
 
                 tbl = tbl.join(subq.alias(), and_(*on_clause))
             else:
@@ -763,8 +832,7 @@ class SqlaTable(Model, BaseDatasource):
                 result = self.query(subquery_obj)
                 cols = {col.column_name: col for col in self.columns}
                 dimensions = [
-                    c for c in result.df.columns
-                    if c not in metrics and c in cols
+                    c for c in result.df.columns if c not in metrics and c in cols
                 ]
                 top_groups = self._get_top_groups(result.df, dimensions)
                 qry = qry.where(top_groups)
@@ -794,8 +862,7 @@ class SqlaTable(Model, BaseDatasource):
         except Exception as e:
             status = utils.QueryStatus.FAILED
             logging.exception(e)
-            error_message = (
-                self.database.db_engine_spec.extract_error_message(e))
+            error_message = self.database.db_engine_spec.extract_error_message(e)
 
         # if this is a main query with prequeries, combine them together
         if not query_obj['is_prequery']:
@@ -808,7 +875,8 @@ class SqlaTable(Model, BaseDatasource):
             df=df,
             duration=datetime.now() - qry_start_dttm,
             query=sql,
-            error_message=error_message)
+            error_message=error_message,
+        )
 
     def get_sqla_table_object(self):
         return self.database.get_table(self.table_name, schema=self.schema)
@@ -819,9 +887,12 @@ class SqlaTable(Model, BaseDatasource):
             table = self.get_sqla_table_object()
         except Exception as e:
             logging.exception(e)
-            raise Exception(_(
-                "Table [{}] doesn't seem to exist in the specified database, "
-                "couldn't fetch column information").format(self.table_name))
+            raise Exception(
+                _(
+                    "Table [{}] doesn't seem to exist in the specified database, "
+                    "couldn't fetch column information"
+                ).format(self.table_name)
+            )
 
         M = SqlMetric  # noqa
         metrics = []
@@ -830,8 +901,8 @@ class SqlaTable(Model, BaseDatasource):
         dbcols = (
             db.session.query(TableColumn)
             .filter(TableColumn.table == self)
-            .filter(or_(TableColumn.column_name == col.name
-                        for col in table.columns)))
+            .filter(or_(TableColumn.column_name == col.name for col in table.columns))
+        )
         dbcols = {dbcol.column_name: dbcol for dbcol in dbcols}
         db_engine_spec = self.database.db_engine_spec
 
@@ -840,8 +911,7 @@ class SqlaTable(Model, BaseDatasource):
                 datatype = col.type.compile(dialect=db_dialect).upper()
             except Exception as e:
                 datatype = 'UNKNOWN'
-                logging.error(
-                    'Unrecognized data type in {}.{}'.format(table, col.name))
+                logging.error('Unrecognized data type in {}.{}'.format(table, col.name))
                 logging.exception(e)
             dbcol = dbcols.get(col.name, None)
             if not dbcol:
@@ -858,17 +928,20 @@ class SqlaTable(Model, BaseDatasource):
                 any_date_col = col.name
             metrics += dbcol.get_metrics().values()
 
-        metrics.append(M(
-            metric_name='count',
-            verbose_name='COUNT(*)',
-            metric_type='count',
-            expression='COUNT(*)',
-        ))
+        metrics.append(
+            M(
+                metric_name='count',
+                verbose_name='COUNT(*)',
+                metric_type='count',
+                expression='COUNT(*)',
+            )
+        )
         if not self.main_dttm_col:
             self.main_dttm_col = any_date_col
         for metric in metrics:
             metric.metric_name = db_engine_spec.mutate_expression_label(
-                metric.metric_name)
+                metric.metric_name
+            )
         self.add_missing_metrics(metrics)
         db.session.merge(self)
         db.session.commit()
@@ -881,23 +954,32 @@ class SqlaTable(Model, BaseDatasource):
          This function can be used to import/export dashboards between multiple
          superset instances. Audit metadata isn't copied over.
         """
+
         def lookup_sqlatable(table):
-            return db.session.query(SqlaTable).join(Database).filter(
-                SqlaTable.table_name == table.table_name,
-                SqlaTable.schema == table.schema,
-                Database.id == table.database_id,
-            ).first()
+            return (
+                db.session.query(SqlaTable)
+                .join(Database)
+                .filter(
+                    SqlaTable.table_name == table.table_name,
+                    SqlaTable.schema == table.schema,
+                    Database.id == table.database_id,
+                )
+                .first()
+            )
 
         def lookup_database(table):
-            return db.session.query(Database).filter_by(
-                database_name=table.params_dict['database_name']).one()
+            return (
+                db.session.query(Database)
+                .filter_by(database_name=table.params_dict['database_name'])
+                .one()
+            )
+
         return import_datasource.import_datasource(
-            db.session, i_datasource, lookup_database, lookup_sqlatable,
-            import_time)
+            db.session, i_datasource, lookup_database, lookup_sqlatable, import_time
+        )
 
     @classmethod
-    def query_datasources_by_name(
-            cls, session, database, datasource_name, schema=None):
+    def query_datasources_by_name(cls, session, database, datasource_name, schema=None):
         query = (
             session.query(cls)
             .filter_by(database_id=database.id)
diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py
index c085958..0b0bbaf 100644
--- a/superset/connectors/sqla/views.py
+++ b/superset/connectors/sqla/views.py
@@ -13,8 +13,12 @@ from superset import appbuilder, db, security_manager
 from superset.connectors.base.views import DatasourceModelView
 from superset.utils import core as utils
 from superset.views.base import (
-    DatasourceFilter, DeleteMixin, get_datasource_exist_error_msg,
-    ListWidgetWithCheckboxes, SupersetModelView, YamlExportMixin,
+    DatasourceFilter,
+    DeleteMixin,
+    get_datasource_exist_error_msg,
+    ListWidgetWithCheckboxes,
+    SupersetModelView,
+    YamlExportMixin,
 )
 from . import models
 
@@ -30,40 +34,62 @@ class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     can_delete = False
     list_widget = ListWidgetWithCheckboxes
     edit_columns = [
-        'column_name', 'verbose_name', 'description',
-        'type', 'groupby', 'filterable',
-        'table', 'expression',
-        'is_dttm', 'python_date_format', 'database_expression']
+        'column_name',
+        'verbose_name',
+        'description',
+        'type',
+        'groupby',
+        'filterable',
+        'table',
+        'expression',
+        'is_dttm',
+        'python_date_format',
+        'database_expression',
+    ]
     add_columns = edit_columns
     list_columns = [
-        'column_name', 'verbose_name', 'type', 'groupby', 'filterable',
-        'is_dttm']
+        'column_name',
+        'verbose_name',
+        'type',
+        'groupby',
+        'filterable',
+        'is_dttm',
+    ]
     page_size = 500
     description_columns = {
         'is_dttm': _(
             'Whether to make this column available as a '
             '[Time Granularity] option, column has to be DATETIME or '
-            'DATETIME-like'),
+            'DATETIME-like'
+        ),
         'filterable': _(
             'Whether this column is exposed in the `Filters` section '
-            'of the explore view.'),
+            'of the explore view.'
+        ),
         'type': _(
             'The data type that was inferred by the database. '
             'It may be necessary to input a type manually for '
             'expression-defined columns in some cases. In most cases '
-            'users should not need to alter this.'),
+            'users should not need to alter this.'
+        ),
         'expression': utils.markdown(
             'a valid, *non-aggregating* SQL expression as supported by the '
-            'underlying backend. Example: `substr(name, 1, 1)`', True),
-        'python_date_format': utils.markdown(Markup(
-            'The pattern of timestamp format, use '
-            '<a href="https://docs.python.org/2/library/'
-            'datetime.html#strftime-strptime-behavior">'
-            'python datetime string pattern</a> '
-            'expression. If time is stored in epoch '
-            'format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` '
-            'below empty if timestamp is stored in '
-            'String or Integer(epoch) type'), True),
+            'underlying backend. Example: `substr(name, 1, 1)`',
+            True,
+        ),
+        'python_date_format': utils.markdown(
+            Markup(
+                'The pattern of timestamp format, use '
+                '<a href="https://docs.python.org/2/library/'
+                'datetime.html#strftime-strptime-behavior">'
+                'python datetime string pattern</a> '
+                'expression. If time is stored in epoch '
+                'format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` '
+                'below empty if timestamp is stored in '
+                'String or Integer(epoch) type'
+            ),
+            True,
+        ),
         'database_expression': utils.markdown(
             'The database expression to cast internal datetime '
             'constants to database date/timestamp type according to the DBAPI. '
@@ -72,7 +98,9 @@ class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
             'The string should be a python string formatter \n'
             "`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle "
             'Superset uses default expression based on DB URI if this '
-            'field is blank.', True),
+            'field is blank.',
+            True,
+        ),
     }
     label_columns = {
         'column_name': _('Column'),
@@ -102,22 +130,35 @@ class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
 
     list_columns = ['metric_name', 'verbose_name', 'metric_type']
     edit_columns = [
-        'metric_name', 'description', 'verbose_name', 'metric_type',
-        'expression', 'table', 'd3format', 'is_restricted', 'warning_text']
+        'metric_name',
+        'description',
+        'verbose_name',
+        'metric_type',
+        'expression',
+        'table',
+        'd3format',
+        'is_restricted',
+        'warning_text',
+    ]
     description_columns = {
         'expression': utils.markdown(
             'a valid, *aggregating* SQL expression as supported by the '
-            'underlying backend. Example: `count(DISTINCT userid)`', True),
-        'is_restricted': _('Whether access to this metric is restricted '
-                           'to certain roles. Only roles with the permission '
-                           "'metric access on XXX (the name of this metric)' "
-                           'are allowed to access this metric'),
+            'underlying backend. Example: `count(DISTINCT userid)`',
+            True,
+        ),
+        'is_restricted': _(
+            'Whether access to this metric is restricted '
+            'to certain roles. Only roles with the permission '
+            "'metric access on XXX (the name of this metric)' "
+            'are allowed to access this metric'
+        ),
         'd3format': utils.markdown(
             'd3 formatting string as defined [here]'
             '(https://github.com/d3/d3-format/blob/master/README.md#format). '
             'For instance, this default formatting applies in the Table '
             'visualization and allows for different metrics to use different '
-            'formats', True,
+            'formats',
+            True,
         ),
     }
     add_columns = edit_columns
@@ -154,25 +195,30 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
     add_title = _('Import a table definition')
     edit_title = _('Edit Table')
 
-    list_columns = [
-        'link', 'database_name',
-        'changed_by_', 'modified']
+    list_columns = ['link', 'database_name', 'changed_by_', 'modified']
     order_columns = ['modified']
     add_columns = ['database', 'schema', 'table_name']
     edit_columns = [
-        'table_name', 'sql', 'filter_select_enabled',
-        'fetch_values_predicate', 'database', 'schema',
-        'description', 'owner',
-        'main_dttm_col', 'default_endpoint', 'offset', 'cache_timeout',
-        'is_sqllab_view', 'template_params',
+        'table_name',
+        'sql',
+        'filter_select_enabled',
+        'fetch_values_predicate',
+        'database',
+        'schema',
+        'description',
+        'owner',
+        'main_dttm_col',
+        'default_endpoint',
+        'offset',
+        'cache_timeout',
+        'is_sqllab_view',
+        'template_params',
     ]
     base_filters = [['id', DatasourceFilter, lambda: []]]
     show_columns = edit_columns + ['perm', 'slices']
     related_views = [TableColumnInlineView, SqlMetricInlineView]
     base_order = ('changed_on', 'desc')
-    search_columns = (
-        'database', 'schema', 'table_name', 'owner', 'is_sqllab_view',
-    )
+    search_columns = ('database', 'schema', 'table_name', 'owner', 'is_sqllab_view')
     description_columns = {
         'slices': _(
             'The list of charts associated with this table. By '
@@ -181,43 +227,48 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
             'Also note that charts need to point to a datasource, so '
             'this form will fail at saving if removing charts from a '
             'datasource. If you want to change the datasource for a chart, '
-            "overwrite the chart from the 'explore view'"),
+            "overwrite the chart from the 'explore view'"
+        ),
         'offset': _('Timezone offset (in hours) for this datasource'),
-        'table_name': _(
-            'Name of the table that exists in the source database'),
+        'table_name': _('Name of the table that exists in the source database'),
         'schema': _(
-            'Schema, as used only in some databases like Postgres, Redshift '
-            'and DB2'),
+            'Schema, as used only in some databases like Postgres, Redshift ' 'and DB2'
+        ),
         'description': Markup(
             'Supports <a href="https://daringfireball.net/projects/markdown/">'
-            'markdown</a>'),
+            'markdown</a>'
+        ),
         'sql': _(
             'This field acts as a Superset view, meaning that Superset will '
-            'run a query against this string as a subquery.',
+            'run a query against this string as a subquery.'
         ),
         'fetch_values_predicate': _(
             'Predicate applied when fetching distinct values to '
             'populate the filter control component. Supports '
             'jinja template syntax. Applies only when '
-            '`Enable Filter Select` is on.',
+            '`Enable Filter Select` is on.'
         ),
         'default_endpoint': _(
             'Redirects to this endpoint when clicking on the table '
-            'from the table list'),
+            'from the table list'
+        ),
         'filter_select_enabled': _(
             "Whether to populate the filter's dropdown in the explore "
             "view's filter section with a list of distinct values fetched "
-            'from the backend on the fly'),
+            'from the backend on the fly'
+        ),
         'is_sqllab_view': _(
-            "Whether the table was generated by the 'Visualize' flow "
-            'in SQL Lab'),
+            "Whether the table was generated by the 'Visualize' flow " 'in SQL Lab'
+        ),
         'template_params': _(
             'A set of parameters that become available in the query using '
-            'Jinja templating syntax'),
+            'Jinja templating syntax'
+        ),
         'cache_timeout': _(
             'Duration (in seconds) of the caching timeout for this table. '
             'A timeout of 0 indicates that the cache never expires. '
-            'Note this defaults to the database timeout if undefined.'),
+            'Note this defaults to the database timeout if undefined.'
+        ),
     }
     label_columns = {
         'slices': _('Associated Charts'),
@@ -246,20 +297,23 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
             table_query = db.session.query(models.SqlaTable).filter(
                 models.SqlaTable.table_name == table.table_name,
                 models.SqlaTable.schema == table.schema,
-                models.SqlaTable.database_id == table.database.id)
+                models.SqlaTable.database_id == table.database.id,
+            )
             if db.session.query(table_query.exists()).scalar():
-                raise Exception(
-                    get_datasource_exist_error_msg(table.full_name))
+                raise Exception(get_datasource_exist_error_msg(table.full_name))
 
         # Fail before adding if the table can't be found
         try:
             table.get_sqla_table_object()
         except Exception:
-            raise Exception(_(
-                'Table [{}] could not be found, '
-                'please double check your '
-                'database connection, schema, and '
-                'table name').format(table.name))
+            raise Exception(
+                _(
+                    'Table [{}] could not be found, '
+                    'please double check your '
+                    'database connection, schema, and '
+                    'table name'
+                ).format(table.name)
+            )
 
     def post_add(self, table, flash_message=True):
         table.fetch_metadata()
@@ -268,11 +322,15 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
             security_manager.merge_perm('schema_access', table.schema_perm)
 
         if flash_message:
-            flash(_(
-                'The table was created. '
-                'As part of this two phase configuration '
-                'process, you should now click the edit button by '
-                'the new table to configure it.'), 'info')
+            flash(
+                _(
+                    'The table was created. '
+                    'As part of this two phase configuration '
+                    'process, you should now click the edit button by '
+                    'the new table to configure it.'
+                ),
+                'info',
+            )
 
     def post_update(self, table):
         self.post_add(table, flash_message=False)
@@ -290,10 +348,8 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
         return redirect('/superset/explore/table/{}/'.format(pk))
 
     @action(
-        'refresh',
-        __('Refresh Metadata'),
-        __('Refresh column metadata'),
-        'fa-refresh')
+        'refresh', __('Refresh Metadata'), __('Refresh column metadata'), 'fa-refresh'
+    )
     def refresh(self, tables):
         if not isinstance(tables, list):
             tables = [tables]
@@ -309,12 +365,14 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
         if len(successes) > 0:
             success_msg = _(
                 'Metadata refreshed for the following table(s): %(tables)s',
-                tables=', '.join([t.table_name for t in successes]))
+                tables=', '.join([t.table_name for t in successes]),
+            )
             flash(success_msg, 'info')
         if len(failures) > 0:
             failure_msg = _(
                 'Unable to retrieve metadata for the following table(s): %(tables)s',
-                tables=', '.join([t.table_name for t in failures]))
+                tables=', '.join([t.table_name for t in failures]),
+            )
             flash(failure_msg, 'danger')
 
         return redirect('/tablemodelview/list/')
@@ -328,6 +386,7 @@ appbuilder.add_link(
     icon='fa-table',
     category='Sources',
     category_label=__('Sources'),
-    category_icon='fa-table')
+    category_icon='fa-table',
+)
 
 appbuilder.add_separator('Sources')
diff --git a/superset/data/bart_lines.py b/superset/data/bart_lines.py
index 8ae8cf4..428cbf4 100644
--- a/superset/data/bart_lines.py
+++ b/superset/data/bart_lines.py
@@ -29,7 +29,8 @@ def load_bart_lines():
             'polyline': Text,
             'path_json': Text,
         },
-        index=False)
+        index=False,
+    )
     print('Creating table {} reference'.format(tbl_name))
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
     if not tbl:
diff --git a/superset/data/birth_names.py b/superset/data/birth_names.py
index b697c31..1fb3bf5 100644
--- a/superset/data/birth_names.py
+++ b/superset/data/birth_names.py
@@ -37,7 +37,8 @@ def load_birth_names():
             'state': String(10),
             'name': String(255),
         },
-        index=False)
+        index=False,
+    )
     print('Done loading table!')
     print('-' * 80)
 
@@ -50,10 +51,12 @@ def load_birth_names():
     obj.filter_select_enabled = True
 
     if not any(col.column_name == 'num_california' for col in obj.columns):
-        obj.columns.append(TableColumn(
-            column_name='num_california',
-            expression="CASE WHEN state = 'CA' THEN num ELSE 0 END",
-        ))
+        obj.columns.append(
+            TableColumn(
+                column_name='num_california',
+                expression="CASE WHEN state = 'CA' THEN num ELSE 0 END",
+            )
+        )
 
     db.session.merge(obj)
     db.session.commit()
@@ -86,13 +89,11 @@ def load_birth_names():
             params=get_slice_json(
                 defaults,
                 groupby=['name'],
-                filters=[{
-                    'col': 'gender',
-                    'op': 'in',
-                    'val': ['girl'],
-                }],
+                filters=[{'col': 'gender', 'op': 'in', 'val': ['girl']}],
                 row_limit=50,
-                timeseries_limit_metric='sum__num')),
+                timeseries_limit_metric='sum__num',
+            ),
+        ),
         Slice(
             slice_name='Boys',
             viz_type='table',
@@ -101,12 +102,10 @@ def load_birth_names():
             params=get_slice_json(
                 defaults,
                 groupby=['name'],
-                filters=[{
-                    'col': 'gender',
-                    'op': 'in',
-                    'val': ['boy'],
-                }],
-                row_limit=50)),
+                filters=[{'col': 'gender', 'op': 'in', 'val': ['boy']}],
+                row_limit=50,
+            ),
+        ),
         Slice(
             slice_name='Participants',
             viz_type='big_number',
@@ -114,16 +113,19 @@ def load_birth_names():
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='big_number', granularity_sqla='ds',
-                compare_lag='5', compare_suffix='over 5Y')),
+                viz_type='big_number',
+                granularity_sqla='ds',
+                compare_lag='5',
+                compare_suffix='over 5Y',
+            ),
+        ),
         Slice(
             slice_name='Genders',
             viz_type='pie',
             datasource_type='table',
             datasource_id=tbl.id,
-            params=get_slice_json(
-                defaults,
-                viz_type='pie', groupby=['gender'])),
+            params=get_slice_json(defaults, viz_type='pie', groupby=['gender']),
+        ),
         Slice(
             slice_name='Genders by State',
             viz_type='dist_bar',
@@ -139,32 +141,28 @@ def load_birth_names():
                         'comparator': ['other'],
                         'operator': 'not in',
                         'subject': 'state',
-                    },
+                    }
                 ],
                 viz_type='dist_bar',
                 metrics=[
                     {
                         'expressionType': 'SIMPLE',
-                        'column': {
-                            'column_name': 'sum_boys',
-                            'type': 'BIGINT(20)',
-                        },
+                        'column': {'column_name': 'sum_boys', 'type': 'BIGINT(20)'},
                         'aggregate': 'SUM',
                         'label': 'Boys',
                         'optionName': 'metric_11',
                     },
                     {
                         'expressionType': 'SIMPLE',
-                        'column': {
-                            'column_name': 'sum_girls',
-                            'type': 'BIGINT(20)',
-                        },
+                        'column': {'column_name': 'sum_girls', 'type': 'BIGINT(20)'},
                         'aggregate': 'SUM',
                         'label': 'Girls',
                         'optionName': 'metric_12',
                     },
                 ],
-                groupby=['state'])),
+                groupby=['state'],
+            ),
+        ),
         Slice(
             slice_name='Trends',
             viz_type='line',
@@ -172,8 +170,13 @@ def load_birth_names():
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='line', groupby=['name'],
-                granularity_sqla='ds', rich_tooltip=True, show_legend=True)),
+                viz_type='line',
+                groupby=['name'],
+                granularity_sqla='ds',
+                rich_tooltip=True,
+                show_legend=True,
+            ),
+        ),
         Slice(
             slice_name='Average and Sum Trends',
             viz_type='dual_line',
@@ -184,16 +187,15 @@ def load_birth_names():
                 viz_type='dual_line',
                 metric={
                     'expressionType': 'SIMPLE',
-                    'column': {
-                        'column_name': 'num',
-                        'type': 'BIGINT(20)',
-                    },
+                    'column': {'column_name': 'num', 'type': 'BIGINT(20)'},
                     'aggregate': 'AVG',
                     'label': 'AVG(num)',
                     'optionName': 'metric_vgops097wej_g8uff99zhk7',
                 },
                 metric_2='sum__num',
-                granularity_sqla='ds')),
+                granularity_sqla='ds',
+            ),
+        ),
         Slice(
             slice_name='Title',
             viz_type='markup',
@@ -201,7 +203,8 @@ def load_birth_names():
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='markup', markup_type='html',
+                viz_type='markup',
+                markup_type='html',
                 code="""\
     <div style='text-align:center'>
         <h1>Birth Names Dashboard</h1>
@@ -211,7 +214,9 @@ def load_birth_names():
         </p>
         <img src='/static/assets/images/babytux.jpg'>
     </div>
-    """)),
+    """,
+            ),
+        ),
         Slice(
             slice_name='Name Cloud',
             viz_type='word_cloud',
@@ -219,9 +224,14 @@ def load_birth_names():
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='word_cloud', size_from='10',
-                series='name', size_to='70', rotation='square',
-                limit='100')),
+                viz_type='word_cloud',
+                size_from='10',
+                series='name',
+                size_to='70',
+                rotation='square',
+                limit='100',
+            ),
+        ),
         Slice(
             slice_name='Pivot Table',
             viz_type='pivot_table',
@@ -229,8 +239,12 @@ def load_birth_names():
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='pivot_table', metrics=['sum__num'],
-                groupby=['name'], columns=['state'])),
+                viz_type='pivot_table',
+                metrics=['sum__num'],
+                groupby=['name'],
+                columns=['state'],
+            ),
+        ),
         Slice(
             slice_name='Number of Girls',
             viz_type='big_number_total',
@@ -238,13 +252,12 @@ def load_birth_names():
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='big_number_total', granularity_sqla='ds',
-                filters=[{
-                    'col': 'gender',
-                    'op': 'in',
-                    'val': ['girl'],
-                }],
-                subheader='total female participants')),
+                viz_type='big_number_total',
+                granularity_sqla='ds',
+                filters=[{'col': 'gender', 'op': 'in', 'val': ['girl']}],
+                subheader='total female participants',
+            ),
+        ),
         Slice(
             slice_name='Number of California Births',
             viz_type='big_number_total',
@@ -262,7 +275,9 @@ def load_birth_names():
                     'label': 'SUM(num_california)',
                 },
                 viz_type='big_number_total',
-                granularity_sqla='ds')),
+                granularity_sqla='ds',
+            ),
+        ),
         Slice(
             slice_name='Top 10 California Names Timeseries',
             viz_type='line',
@@ -270,15 +285,17 @@ def load_birth_names():
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                metrics=[{
-                    'expressionType': 'SIMPLE',
-                    'column': {
-                        'column_name': 'num_california',
-                        'expression': "CASE WHEN state = 'CA' THEN num ELSE 0 END",
-                    },
-                    'aggregate': 'SUM',
-                    'label': 'SUM(num_california)',
-                }],
+                metrics=[
+                    {
+                        'expressionType': 'SIMPLE',
+                        'column': {
+                            'column_name': 'num_california',
+                            'expression': "CASE WHEN state = 'CA' THEN num ELSE 0 END",
+                        },
+                        'aggregate': 'SUM',
+                        'label': 'SUM(num_california)',
+                    }
+                ],
                 viz_type='line',
                 granularity_sqla='ds',
                 groupby=['name'],
@@ -291,7 +308,9 @@ def load_birth_names():
                     'aggregate': 'SUM',
                     'label': 'SUM(num_california)',
                 },
-                limit='10')),
+                limit='10',
+            ),
+        ),
         Slice(
             slice_name='Names Sorted by Num in California',
             viz_type='table',
@@ -309,15 +328,16 @@ def load_birth_names():
                     },
                     'aggregate': 'SUM',
                     'label': 'SUM(num_california)',
-                })),
+                },
+            ),
+        ),
         Slice(
             slice_name='Num Births Trend',
             viz_type='line',
             datasource_type='table',
             datasource_id=tbl.id,
-            params=get_slice_json(
-                defaults,
-                viz_type='line')),
+            params=get_slice_json(defaults, viz_type='line'),
+        ),
     ]
     for slc in slices:
         merge_slice(slc)
@@ -327,7 +347,8 @@ def load_birth_names():
 
     if not dash:
         dash = Dash()
-    js = textwrap.dedent("""\
+    js = textwrap.dedent(
+        """\
 {
     "CHART-0dd270f0": {
         "meta": {
@@ -576,7 +597,8 @@ def load_birth_names():
     },
     "DASHBOARD_VERSION_KEY": "v2"
 }
-        """)
+        """
+    )
     pos = json.loads(js)
     # dashboard v2 doesn't allow add markup slice
     dash.slices = [slc for slc in slices if slc.viz_type != 'markup']
diff --git a/superset/data/countries.py b/superset/data/countries.py
index c0dd8d6..b43d190 100644
--- a/superset/data/countries.py
+++ b/superset/data/countries.py
@@ -8,7 +8,7 @@ countries = [
         "capital": "Luanda",
         "lat": -12.5,
         "lng": 18.5,
-        "cca3": "AGO"
+        "cca3": "AGO",
     },
     {
         "name": "Algeria",
@@ -18,7 +18,7 @@ countries = [
         "capital": "Algiers",
         "lat": 28,
         "lng": 3,
-        "cca3": "DZA"
+        "cca3": "DZA",
     },
     {
         "name": "Egypt",
@@ -28,7 +28,7 @@ countries = [
         "capital": "Cairo",
         "lat": 27,
         "lng": 30,
-        "cca3": "EGY"
+        "cca3": "EGY",
     },
     {
         "name": "Bangladesh",
@@ -38,7 +38,7 @@ countries = [
         "capital": "Dhaka",
         "lat": 24,
         "lng": 90,
-        "cca3": "BGD"
+        "cca3": "BGD",
     },
     {
         "name": "Niger",
@@ -48,7 +48,7 @@ countries = [
         "capital": "Niamey",
         "lat": 16,
         "lng": 8,
-        "cca3": "NER"
+        "cca3": "NER",
     },
     {
         "name": "Liechtenstein",
@@ -58,7 +58,7 @@ countries = [
         "capital": "Vaduz",
         "lat": 47.26666666,
         "lng": 9.53333333,
-        "cca3": "LIE"
+        "cca3": "LIE",
     },
     {
         "name": "Namibia",
@@ -68,7 +68,7 @@ countries = [
         "capital": "Windhoek",
         "lat": -22,
         "lng": 17,
-        "cca3": "NAM"
+        "cca3": "NAM",
     },
     {
         "name": "Bulgaria",
@@ -78,7 +78,7 @@ countries = [
         "capital": "Sofia",
         "lat": 43,
         "lng": 25,
-        "cca3": "BGR"
+        "cca3": "BGR",
     },
     {
         "name": "Bolivia",
@@ -88,7 +88,7 @@ countries = [
         "capital": "Sucre",
         "lat": -17,
         "lng": -65,
-        "cca3": "BOL"
+        "cca3": "BOL",
     },
     {
         "name": "Ghana",
@@ -98,7 +98,7 @@ countries = [
         "capital": "Accra",
         "lat": 8,
         "lng": -2,
-        "cca3": "GHA"
+        "cca3": "GHA",
     },
     {
         "name": "Cocos (Keeling) Islands",
@@ -108,7 +108,7 @@ countries = [
         "capital": "West Island",
         "lat": -12.5,
         "lng": 96.83333333,
-        "cca3": "CCK"
+        "cca3": "CCK",
     },
     {
         "name": "Pakistan",
@@ -118,7 +118,7 @@ countries = [
         "capital": "Islamabad",
         "lat": 30,
         "lng": 70,
-        "cca3": "PAK"
+        "cca3": "PAK",
     },
     {
         "name": "Cape Verde",
@@ -128,7 +128,7 @@ countries = [
         "capital": "Praia",
         "lat": 16,
         "lng": -24,
-        "cca3": "CPV"
+        "cca3": "CPV",
     },
     {
         "name": "Jordan",
@@ -138,7 +138,7 @@ countries = [
         "capital": "Amman",
         "lat": 31,
         "lng": 36,
-        "cca3": "JOR"
+        "cca3": "JOR",
     },
     {
         "name": "Liberia",
@@ -148,7 +148,7 @@ countries = [
         "capital": "Monrovia",
         "lat": 6.5,
         "lng": -9.5,
-        "cca3": "LBR"
+        "cca3": "LBR",
     },
     {
         "name": "Libya",
@@ -158,7 +158,7 @@ countries = [
         "capital": "Tripoli",
         "lat": 25,
         "lng": 17,
-        "cca3": "LBY"
+        "cca3": "LBY",
     },
     {
         "name": "Malaysia",
@@ -168,7 +168,7 @@ countries = [
         "capital": "Kuala Lumpur",
         "lat": 2.5,
         "lng": 112.5,
-        "cca3": "MYS"
+        "cca3": "MYS",
     },
     {
         "name": "Dominican Republic",
@@ -178,7 +178,7 @@ countries = [
         "capital": "Santo Domingo",
         "lat": 19,
         "lng": -70.66666666,
-        "cca3": "DOM"
+        "cca3": "DOM",
     },
     {
         "name": "Puerto Rico",
@@ -188,7 +188,7 @@ countries = [
         "capital": "San Juan",
         "lat": 18.25,
         "lng": -66.5,
-        "cca3": "PRI"
+        "cca3": "PRI",
     },
     {
         "name": "Mayotte",
@@ -198,7 +198,7 @@ countries = [
         "capital": "Mamoudzou",
         "lat": -12.83333333,
         "lng": 45.16666666,
-        "cca3": "MYT"
+        "cca3": "MYT",
     },
     {
         "name": "North Korea",
@@ -208,7 +208,7 @@ countries = [
         "capital": "Pyongyang",
         "lat": 40,
         "lng": 127,
-        "cca3": "PRK"
+        "cca3": "PRK",
     },
     {
         "name": "Palestine",
@@ -218,7 +218,7 @@ countries = [
         "capital": "Ramallah",
         "lat": 31.9,
         "lng": 35.2,
-        "cca3": "PSE"
+        "cca3": "PSE",
     },
     {
         "name": "Tanzania",
@@ -228,7 +228,7 @@ countries = [
         "capital": "Dodoma",
         "lat": -6,
         "lng": 35,
-        "cca3": "TZA"
+        "cca3": "TZA",
     },
     {
         "name": "Botswana",
@@ -238,7 +238,7 @@ countries = [
         "capital": "Gaborone",
         "lat": -22,
         "lng": 24,
-        "cca3": "BWA"
+        "cca3": "BWA",
     },
     {
         "name": "Cambodia",
@@ -248,7 +248,7 @@ countries = [
         "capital": "Phnom Penh",
         "lat": 13,
         "lng": 105,
-        "cca3": "KHM"
+        "cca3": "KHM",
     },
     {
         "name": "Nicaragua",
@@ -258,7 +258,7 @@ countries = [
         "capital": "Managua",
         "lat": 13,
         "lng": -85,
-        "cca3": "NIC"
+        "cca3": "NIC",
     },
     {
         "name": "Trinidad and Tobago",
@@ -268,7 +268,7 @@ countries = [
         "capital": "Port of Spain",
         "lat": 11,
         "lng": -61,
-        "cca3": "TTO"
+        "cca3": "TTO",
     },
     {
         "name": "Ethiopia",
@@ -278,7 +278,7 @@ countries = [
         "capital": "Addis Ababa",
         "lat": 8,
         "lng": 38,
-        "cca3": "ETH"
+        "cca3": "ETH",
     },
     {
         "name": "Paraguay",
@@ -288,7 +288,7 @@ countries = [
         "capital": "Asuncion",
         "lat": -23,
         "lng": -58,
-        "cca3": "PRY"
+        "cca3": "PRY",
     },
     {
         "name": "Hong Kong",
@@ -298,7 +298,7 @@ countries = [
         "capital": "City of Victoria",
         "lat": 22.267,
         "lng": 114.188,
-        "cca3": "HKG"
+        "cca3": "HKG",
     },
     {
         "name": "Saudi Arabia",
@@ -308,7 +308,7 @@ countries = [
         "capital": "Riyadh",
         "lat": 25,
         "lng": 45,
-        "cca3": "SAU"
+        "cca3": "SAU",
     },
     {
         "name": "Lebanon",
@@ -318,7 +318,7 @@ countries = [
         "capital": "Beirut",
         "lat": 33.83333333,
         "lng": 35.83333333,
-        "cca3": "LBN"
+        "cca3": "LBN",
     },
     {
         "name": "Slovenia",
@@ -328,7 +328,7 @@ countries = [
         "capital": "Ljubljana",
         "lat": 46.11666666,
         "lng": 14.81666666,
-        "cca3": "SVN"
+        "cca3": "SVN",
     },
     {
         "name": "Burkina Faso",
@@ -338,7 +338,7 @@ countries = [
         "capital": "Ouagadougou",
         "lat": 13,
         "lng": -2,
-        "cca3": "BFA"
+        "cca3": "BFA",
     },
     {
         "name": "Switzerland",
@@ -348,7 +348,7 @@ countries = [
         "capital": "Bern",
         "lat": 47,
         "lng": 8,
-        "cca3": "CHE"
+        "cca3": "CHE",
     },
     {
         "name": "Mauritania",
@@ -358,7 +358,7 @@ countries = [
         "capital": "Nouakchott",
         "lat": 20,
         "lng": -12,
-        "cca3": "MRT"
+        "cca3": "MRT",
     },
     {
         "name": "Croatia",
@@ -368,7 +368,7 @@ countries = [
         "capital": "Zagreb",
         "lat": 45.16666666,
         "lng": 15.5,
-        "cca3": "HRV"
+        "cca3": "HRV",
     },
     {
         "name": "Chile",
@@ -378,7 +378,7 @@ countries = [
         "capital": "Santiago",
         "lat": -30,
         "lng": -71,
-        "cca3": "CHL"
+        "cca3": "CHL",
     },
     {
         "name": "China",
@@ -388,7 +388,7 @@ countries = [
         "capital": "Beijing",
         "lat": 35,
         "lng": 105,
-        "cca3": "CHN"
+        "cca3": "CHN",
     },
     {
         "name": "Saint Kitts and Nevis",
@@ -398,7 +398,7 @@ countries = [
         "capital": "Basseterre",
         "lat": 17.33333333,
         "lng": -62.75,
-        "cca3": "KNA"
+        "cca3": "KNA",
     },
     {
         "name": "Sierra Leone",
@@ -408,7 +408,7 @@ countries = [
         "capital": "Freetown",
         "lat": 8.5,
         "lng": -11.5,
-        "cca3": "SLE"
+        "cca3": "SLE",
     },
     {
         "name": "Jamaica",
@@ -418,7 +418,7 @@ countries = [
         "capital": "Kingston",
         "lat": 18.25,
         "lng": -77.5,
-        "cca3": "JAM"
+        "cca3": "JAM",
     },
     {
         "name": "San Marino",
@@ -428,7 +428,7 @@ countries = [
         "capital": "City of San Marino",
         "lat": 43.76666666,
         "lng": 12.41666666,
-        "cca3": "SMR"
+        "cca3": "SMR",
     },
     {
         "name": "Gibraltar",
@@ -438,7 +438,7 @@ countries = [
         "capital": "Gibraltar",
         "lat": 36.13333333,
         "lng": -5.35,
-        "cca3": "GIB"
+        "cca3": "GIB",
     },
     {
         "name": "Djibouti",
@@ -448,7 +448,7 @@ countries = [
         "capital": "Djibouti",
         "lat": 11.5,
         "lng": 43,
-        "cca3": "DJI"
+        "cca3": "DJI",
     },
     {
         "name": "Guinea",
@@ -458,7 +458,7 @@ countries = [
         "capital": "Conakry",
         "lat": 11,
         "lng": -10,
-        "cca3": "GIN"
+        "cca3": "GIN",
     },
     {
         "name": "Finland",
@@ -468,7 +468,7 @@ countries = [
         "capital": "Helsinki",
         "lat": 64,
         "lng": 26,
-        "cca3": "FIN"
+        "cca3": "FIN",
     },
     {
         "name": "Uruguay",
@@ -478,7 +478,7 @@ countries = [
         "capital": "Montevideo",
         "lat": -33,
         "lng": -56,
-        "cca3": "URY"
+        "cca3": "URY",
     },
     {
         "name": "Thailand",
@@ -488,7 +488,7 @@ countries = [
         "capital": "Bangkok",
         "lat": 15,
         "lng": 100,
-        "cca3": "THA"
+        "cca3": "THA",
     },
     {
         "name": "Sao Tome and Principe",
@@ -498,7 +498,7 @@ countries = [
         "capital": "Sao Tome",
         "lat": 1,
         "lng": 7,
-        "cca3": "STP"
+        "cca3": "STP",
     },
     {
         "name": "Seychelles",
@@ -508,7 +508,7 @@ countries = [
         "capital": "Victoria",
         "lat": -4.58333333,
         "lng": 55.66666666,
-        "cca3": "SYC"
+        "cca3": "SYC",
     },
     {
         "name": "Nepal",
@@ -518,7 +518,7 @@ countries = [
         "capital": "Kathmandu",
         "lat": 28,
         "lng": 84,
-        "cca3": "NPL"
+        "cca3": "NPL",
     },
     {
         "name": "Christmas Island",
@@ -528,7 +528,7 @@ countries = [
         "capital": "Flying Fish Cove",
         "lat": -10.5,
         "lng": 105.66666666,
-        "cca3": "CXR"
+        "cca3": "CXR",
     },
     {
         "name": "Laos",
@@ -538,7 +538,7 @@ countries = [
         "capital": "Vientiane",
         "lat": 18,
         "lng": 105,
-        "cca3": "LAO"
+        "cca3": "LAO",
     },
     {
         "name": "Yemen",
@@ -548,7 +548,7 @@ countries = [
         "capital": "Sana'a",
         "lat": 15,
         "lng": 48,
-        "cca3": "YEM"
+        "cca3": "YEM",
     },
     {
         "name": "Bouvet Island",
@@ -558,7 +558,7 @@ countries = [
         "capital": "",
         "lat": -54.43333333,
         "lng": 3.4,
-        "cca3": "BVT"
+        "cca3": "BVT",
     },
     {
         "name": "South Africa",
@@ -568,7 +568,7 @@ countries = [
         "capital": "Pretoria",
         "lat": -29,
         "lng": 24,
-        "cca3": "ZAF"
+        "cca3": "ZAF",
     },
     {
         "name": "Kiribati",
@@ -578,7 +578,7 @@ countries = [
         "capital": "South Tarawa",
         "lat": 1.41666666,
         "lng": 173,
-        "cca3": "KIR"
+        "cca3": "KIR",
     },
     {
         "name": "Philippines",
@@ -588,7 +588,7 @@ countries = [
         "capital": "Manila",
         "lat": 13,
         "lng": 122,
-        "cca3": "PHL"
+        "cca3": "PHL",
     },
     {
         "name": "Sint Maarten",
@@ -598,7 +598,7 @@ countries = [
         "capital": "Philipsburg",
         "lat": 18.033333,
         "lng": -63.05,
-        "cca3": "SXM"
+        "cca3": "SXM",
     },
     {
         "name": "Romania",
@@ -608,7 +608,7 @@ countries = [
         "capital": "Bucharest",
         "lat": 46,
         "lng": 25,
-        "cca3": "ROU"
+        "cca3": "ROU",
     },
     {
         "name": "United States Virgin Islands",
@@ -618,7 +618,7 @@ countries = [
         "capital": "Charlotte Amalie",
         "lat": 18.35,
         "lng": -64.933333,
-        "cca3": "VIR"
+        "cca3": "VIR",
     },
     {
         "name": "Syria",
@@ -628,7 +628,7 @@ countries = [
         "capital": "Damascus",
         "lat": 35,
         "lng": 38,
-        "cca3": "SYR"
+        "cca3": "SYR",
     },
     {
         "name": "Macau",
@@ -638,7 +638,7 @@ countries = [
         "capital": "",
         "lat": 22.16666666,
         "lng": 113.55,
-        "cca3": "MAC"
+        "cca3": "MAC",
     },
     {
         "name": "Saint Martin",
@@ -648,7 +648,7 @@ countries = [
         "capital": "Marigot",
         "lat": 18.08333333,
         "lng": -63.95,
-        "cca3": "MAF"
+        "cca3": "MAF",
     },
     {
         "name": "Malta",
@@ -658,7 +658,7 @@ countries = [
         "capital": "Valletta",
         "lat": 35.83333333,
         "lng": 14.58333333,
-        "cca3": "MLT"
+        "cca3": "MLT",
     },
     {
         "name": "Kazakhstan",
@@ -668,7 +668,7 @@ countries = [
         "capital": "Astana",
         "lat": 48,
         "lng": 68,
-        "cca3": "KAZ"
+        "cca3": "KAZ",
     },
     {
         "name": "Turks and Caicos Islands",
@@ -678,7 +678,7 @@ countries = [
         "capital": "Cockburn Town",
         "lat": 21.75,
         "lng": -71.58333333,
-        "cca3": "TCA"
+        "cca3": "TCA",
     },
     {
         "name": "French Polynesia",
@@ -688,7 +688,7 @@ countries = [
         "capital": "Papeete",
         "lat": -15,
         "lng": -140,
-        "cca3": "PYF"
+        "cca3": "PYF",
     },
     {
         "name": "Niue",
@@ -698,7 +698,7 @@ countries = [
         "capital": "Alofi",
         "lat": -19.03333333,
         "lng": -169.86666666,
-        "cca3": "NIU"
+        "cca3": "NIU",
     },
     {
         "name": "Dominica",
@@ -708,7 +708,7 @@ countries = [
         "capital": "Roseau",
         "lat": 15.41666666,
         "lng": -61.33333333,
-        "cca3": "DMA"
+        "cca3": "DMA",
     },
     {
         "name": "Benin",
@@ -718,7 +718,7 @@ countries = [
         "capital": "Porto-Novo",
         "lat": 9.5,
         "lng": 2.25,
-        "cca3": "BEN"
+        "cca3": "BEN",
     },
     {
         "name": "French Guiana",
@@ -728,7 +728,7 @@ countries = [
         "capital": "Cayenne",
         "lat": 4,
         "lng": -53,
-        "cca3": "GUF"
+        "cca3": "GUF",
     },
     {
         "name": "Belgium",
@@ -738,7 +738,7 @@ countries = [
         "capital": "Brussels",
         "lat": 50.83333333,
         "lng": 4,
-        "cca3": "BEL"
+        "cca3": "BEL",
     },
     {
         "name": "Montserrat",
@@ -748,7 +748,7 @@ countries = [
         "capital": "Plymouth",
         "lat": 16.75,
         "lng": -62.2,
-        "cca3": "MSR"
+        "cca3": "MSR",
     },
     {
         "name": "Togo",
@@ -758,7 +758,7 @@ countries = [
         "capital": "Lome",
         "lat": 8,
         "lng": 1.16666666,
-        "cca3": "TGO"
+        "cca3": "TGO",
     },
     {
         "name": "Germany",
@@ -768,7 +768,7 @@ countries = [
         "capital": "Berlin",
         "lat": 51,
         "lng": 9,
-        "cca3": "DEU"
+        "cca3": "DEU",
     },
     {
         "name": "Guam",
@@ -778,7 +778,7 @@ countries = [
         "capital": "Hagatna",
         "lat": 13.46666666,
         "lng": 144.78333333,
-        "cca3": "GUM"
+        "cca3": "GUM",
     },
     {
         "name": "Sri Lanka",
@@ -788,7 +788,7 @@ countries = [
         "capital": "Colombo",
         "lat": 7,
         "lng": 81,
-        "cca3": "LKA"
+        "cca3": "LKA",
     },
     {
         "name": "South Sudan",
@@ -798,7 +798,7 @@ countries = [
         "capital": "Juba",
         "lat": 7,
         "lng": 30,
-        "cca3": "SSD"
+        "cca3": "SSD",
     },
     {
         "name": "Falkland Islands",
@@ -808,7 +808,7 @@ countries = [
         "capital": "Stanley",
         "lat": -51.75,
         "lng": -59,
-        "cca3": "FLK"
+        "cca3": "FLK",
     },
     {
         "name": "United Kingdom",
@@ -818,7 +818,7 @@ countries = [
         "capital": "London",
         "lat": 54,
         "lng": -2,
-        "cca3": "GBR"
+        "cca3": "GBR",
     },
     {
         "name": "Guyana",
@@ -828,7 +828,7 @@ countries = [
         "capital": "Georgetown",
         "lat": 5,
         "lng": -59,
-        "cca3": "GUY"
+        "cca3": "GUY",
     },
     {
         "name": "Costa Rica",
@@ -838,7 +838,7 @@ countries = [
         "capital": "San Jose",
         "lat": 10,
         "lng": -84,
-        "cca3": "CRI"
+        "cca3": "CRI",
     },
     {
         "name": "Cameroon",
@@ -848,7 +848,7 @@ countries = [
         "capital": "Yaounde",
         "lat": 6,
         "lng": 12,
-        "cca3": "CMR"
+        "cca3": "CMR",
     },
     {
         "name": "Morocco",
@@ -858,7 +858,7 @@ countries = [
         "capital": "Rabat",
         "lat": 32,
         "lng": -5,
-        "cca3": "MAR"
+        "cca3": "MAR",
     },
     {
         "name": "Northern Mariana Islands",
@@ -868,7 +868,7 @@ countries = [
         "capital": "Saipan",
         "lat": 15.2,
         "lng": 145.75,
-        "cca3": "MNP"
+        "cca3": "MNP",
     },
     {
         "name": "Lesotho",
@@ -878,7 +878,7 @@ countries = [
         "capital": "Maseru",
         "lat": -29.5,
         "lng": 28.5,
-        "cca3": "LSO"
+        "cca3": "LSO",
     },
     {
         "name": "Hungary",
@@ -888,7 +888,7 @@ countries = [
         "capital": "Budapest",
         "lat": 47,
         "lng": 20,
-        "cca3": "HUN"
+        "cca3": "HUN",
     },
     {
         "name": "Turkmenistan",
@@ -898,7 +898,7 @@ countries = [
         "capital": "Ashgabat",
         "lat": 40,
         "lng": 60,
-        "cca3": "TKM"
+        "cca3": "TKM",
     },
     {
         "name": "Suriname",
@@ -908,7 +908,7 @@ countries = [
         "capital": "Paramaribo",
         "lat": 4,
         "lng": -56,
-        "cca3": "SUR"
+        "cca3": "SUR",
     },
     {
         "name": "Netherlands",
@@ -918,7 +918,7 @@ countries = [
         "capital": "Amsterdam",
         "lat": 52.5,
         "lng": 5.75,
-        "cca3": "NLD"
+        "cca3": "NLD",
     },
     {
         "name": "Bermuda",
@@ -928,7 +928,7 @@ countries = [
         "capital": "Hamilton",
         "lat": 32.33333333,
         "lng": -64.75,
-        "cca3": "BMU"
+        "cca3": "BMU",
     },
     {
         "name": "Heard Island and McDonald Islands",
@@ -938,7 +938,7 @@ countries = [
         "capital": "",
         "lat": -53.1,
         "lng": 72.51666666,
-        "cca3": "HMD"
+        "cca3": "HMD",
     },
     {
         "name": "Chad",
@@ -948,7 +948,7 @@ countries = [
         "capital": "N'Djamena",
         "lat": 15,
         "lng": 19,
-        "cca3": "TCD"
+        "cca3": "TCD",
     },
     {
         "name": "Georgia",
@@ -958,7 +958,7 @@ countries = [
         "capital": "Tbilisi",
         "lat": 42,
         "lng": 43.5,
-        "cca3": "GEO"
+        "cca3": "GEO",
     },
     {
         "name": "Montenegro",
@@ -968,7 +968,7 @@ countries = [
         "capital": "Podgorica",
         "lat": 42.5,
         "lng": 19.3,
-        "cca3": "MNE"
+        "cca3": "MNE",
     },
     {
         "name": "Mongolia",
@@ -978,7 +978,7 @@ countries = [
         "capital": "Ulan Bator",
         "lat": 46,
         "lng": 105,
-        "cca3": "MNG"
+        "cca3": "MNG",
     },
     {
         "name": "Marshall Islands",
@@ -988,7 +988,7 @@ countries = [
         "capital": "Majuro",
         "lat": 9,
         "lng": 168,
-        "cca3": "MHL"
+        "cca3": "MHL",
     },
     {
         "name": "Martinique",
@@ -998,7 +998,7 @@ countries = [
         "capital": "Fort-de-France",
         "lat": 14.666667,
         "lng": -61,
-        "cca3": "MTQ"
+        "cca3": "MTQ",
     },
     {
         "name": "Belize",
@@ -1008,7 +1008,7 @@ countries = [
         "capital": "Belmopan",
         "lat": 17.25,
         "lng": -88.75,
-        "cca3": "BLZ"
+        "cca3": "BLZ",
     },
     {
         "name": "Norfolk Island",
@@ -1018,7 +1018,7 @@ countries = [
         "capital": "Kingston",
         "lat": -29.03333333,
         "lng": 167.95,
-        "cca3": "NFK"
+        "cca3": "NFK",
     },
     {
         "name": "Myanmar",
@@ -1028,7 +1028,7 @@ countries = [
         "capital": "Naypyidaw",
         "lat": 22,
         "lng": 98,
-        "cca3": "MMR"
+        "cca3": "MMR",
     },
     {
         "name": "Afghanistan",
@@ -1038,7 +1038,7 @@ countries = [
         "capital": "Kabul",
         "lat": 33,
         "lng": 65,
-        "cca3": "AFG"
+        "cca3": "AFG",
     },
     {
         "name": "Burundi",
@@ -1048,7 +1048,7 @@ countries = [
         "capital": "Bujumbura",
         "lat": -3.5,
         "lng": 30,
-        "cca3": "BDI"
+        "cca3": "BDI",
     },
     {
         "name": "British Virgin Islands",
@@ -1058,7 +1058,7 @@ countries = [
         "capital": "Road Town",
         "lat": 18.431383,
         "lng": -64.62305,
-        "cca3": "VGB"
+        "cca3": "VGB",
     },
     {
         "name": "Belarus",
@@ -1068,7 +1068,7 @@ countries = [
         "capital": "Minsk",
         "lat": 53,
         "lng": 28,
-        "cca3": "BLR"
+        "cca3": "BLR",
     },
     {
         "name": "Saint Barthelemy",
@@ -1078,7 +1078,7 @@ countries = [
         "capital": "Gustavia",
         "lat": 18.5,
         "lng": -63.41666666,
-        "cca3": "BLM"
+        "cca3": "BLM",
     },
     {
         "name": "Grenada",
@@ -1088,7 +1088,7 @@ countries = [
         "capital": "St. George's",
         "lat": 12.11666666,
         "lng": -61.66666666,
-        "cca3": "GRD"
+        "cca3": "GRD",
     },
     {
         "name": "Tokelau",
@@ -1098,7 +1098,7 @@ countries = [
         "capital": "Fakaofo",
         "lat": -9,
         "lng": -172,
-        "cca3": "TKL"
+        "cca3": "TKL",
     },
     {
         "name": "Greece",
@@ -1108,7 +1108,7 @@ countries = [
         "capital": "Athens",
         "lat": 39,
         "lng": 22,
-        "cca3": "GRC"
+        "cca3": "GRC",
     },
     {
         "name": "Russia",
@@ -1118,7 +1118,7 @@ countries = [
         "capital": "Moscow",
         "lat": 60,
         "lng": 100,
-        "cca3": "RUS"
+        "cca3": "RUS",
     },
     {
         "name": "Greenland",
@@ -1128,7 +1128,7 @@ countries = [
         "capital": "Nuuk",
         "lat": 72,
         "lng": -40,
-        "cca3": "GRL"
+        "cca3": "GRL",
     },
     {
         "name": "Andorra",
@@ -1138,7 +1138,7 @@ countries = [
         "capital": "Andorra la Vella",
         "lat": 42.5,
         "lng": 1.5,
-        "cca3": "AND"
+        "cca3": "AND",
     },
     {
         "name": "Mozambique",
@@ -1148,7 +1148,7 @@ countries = [
         "capital": "Maputo",
         "lat": -18.25,
         "lng": 35,
-        "cca3": "MOZ"
+        "cca3": "MOZ",
     },
     {
         "name": "Tajikistan",
@@ -1158,7 +1158,7 @@ countries = [
         "capital": "Dushanbe",
         "lat": 39,
         "lng": 71,
-        "cca3": "TJK"
+        "cca3": "TJK",
     },
     {
         "name": "Haiti",
@@ -1168,7 +1168,7 @@ countries = [
         "capital": "Port-au-Prince",
         "lat": 19,
         "lng": -72.41666666,
-        "cca3": "HTI"
+        "cca3": "HTI",
     },
     {
         "name": "Mexico",
@@ -1178,7 +1178,7 @@ countries = [
         "capital": "Mexico City",
         "lat": 23,
         "lng": -102,
-        "cca3": "MEX"
+        "cca3": "MEX",
     },
     {
         "name": "Zimbabwe",
@@ -1188,7 +1188,7 @@ countries = [
         "capital": "Harare",
         "lat": -20,
         "lng": 30,
-        "cca3": "ZWE"
+        "cca3": "ZWE",
     },
     {
         "name": "Saint Lucia",
@@ -1198,7 +1198,7 @@ countries = [
         "capital": "Castries",
         "lat": 13.88333333,
         "lng": -60.96666666,
-        "cca3": "LCA"
+        "cca3": "LCA",
     },
     {
         "name": "India",
@@ -1208,7 +1208,7 @@ countries = [
         "capital": "New Delhi",
         "lat": 20,
         "lng": 77,
-        "cca3": "IND"
+        "cca3": "IND",
     },
     {
         "name": "Latvia",
@@ -1218,7 +1218,7 @@ countries = [
         "capital": "Riga",
         "lat": 57,
         "lng": 25,
-        "cca3": "LVA"
+        "cca3": "LVA",
     },
     {
         "name": "Bhutan",
@@ -1228,7 +1228,7 @@ countries = [
         "capital": "Thimphu",
         "lat": 27.5,
         "lng": 90.5,
-        "cca3": "BTN"
+        "cca3": "BTN",
     },
     {
         "name": "Saint Vincent and the Grenadines",
@@ -1238,7 +1238,7 @@ countries = [
         "capital": "Kingstown",
         "lat": 13.25,
         "lng": -61.2,
-        "cca3": "VCT"
+        "cca3": "VCT",
     },
     {
         "name": "Vietnam",
@@ -1248,7 +1248,7 @@ countries = [
         "capital": "Hanoi",
         "lat": 16.16666666,
         "lng": 107.83333333,
-        "cca3": "VNM"
+        "cca3": "VNM",
     },
     {
         "name": "Norway",
@@ -1258,7 +1258,7 @@ countries = [
         "capital": "Oslo",
         "lat": 62,
         "lng": 10,
-        "cca3": "NOR"
+        "cca3": "NOR",
     },
     {
         "name": "Czech Republic",
@@ -1268,7 +1268,7 @@ countries = [
         "capital": "Prague",
         "lat": 49.75,
         "lng": 15.5,
-        "cca3": "CZE"
+        "cca3": "CZE",
     },
     {
         "name": "French Southern and Antarctic Lands",
@@ -1278,7 +1278,7 @@ countries = [
         "capital": "Port-aux-Francais",
         "lat": -49.25,
         "lng": 69.167,
-        "cca3": "ATF"
+        "cca3": "ATF",
     },
     {
         "name": "Antigua and Barbuda",
@@ -1288,7 +1288,7 @@ countries = [
         "capital": "Saint John's",
         "lat": 17.05,
         "lng": -61.8,
-        "cca3": "ATG"
+        "cca3": "ATG",
     },
     {
         "name": "Fiji",
@@ -1298,7 +1298,7 @@ countries = [
         "capital": "Suva",
         "lat": -18,
         "lng": 175,
-        "cca3": "FJI"
+        "cca3": "FJI",
     },
     {
         "name": "British Indian Ocean Territory",
@@ -1308,7 +1308,7 @@ countries = [
         "capital": "Diego Garcia",
         "lat": -6,
         "lng": 71.5,
-        "cca3": "IOT"
+        "cca3": "IOT",
     },
     {
         "name": "Honduras",
@@ -1318,7 +1318,7 @@ countries = [
         "capital": "Tegucigalpa",
         "lat": 15,
         "lng": -86.5,
-        "cca3": "HND"
+        "cca3": "HND",
     },
     {
         "name": "Mauritius",
@@ -1328,7 +1328,7 @@ countries = [
         "capital": "Port Louis",
         "lat": -20.28333333,
         "lng": 57.55,
-        "cca3": "MUS"
+        "cca3": "MUS",
     },
     {
         "name": "Antarctica",
@@ -1338,7 +1338,7 @@ countries = [
         "capital": "",
         "lat": -90,
         "lng": 0,
-        "cca3": "ATA"
+        "cca3": "ATA",
     },
     {
         "name": "Luxembourg",
@@ -1348,7 +1348,7 @@ countries = [
         "capital": "Luxembourg",
         "lat": 49.75,
         "lng": 6.16666666,
-        "cca3": "LUX"
+        "cca3": "LUX",
     },
     {
         "name": "Israel",
@@ -1358,7 +1358,7 @@ countries = [
         "capital": "Jerusalem",
         "lat": 31.47,
         "lng": 35.13,
-        "cca3": "ISR"
+        "cca3": "ISR",
     },
     {
         "name": "Micronesia",
@@ -1368,7 +1368,7 @@ countries = [
         "capital": "Palikir",
         "lat": 6.91666666,
         "lng": 158.25,
-        "cca3": "FSM"
+        "cca3": "FSM",
     },
     {
         "name": "Peru",
@@ -1378,7 +1378,7 @@ countries = [
         "capital": "Lima",
         "lat": -10,
         "lng": -76,
-        "cca3": "PER"
+        "cca3": "PER",
     },
     {
         "name": "Reunion",
@@ -1388,7 +1388,7 @@ countries = [
         "capital": "Saint-Denis",
         "lat": -21.15,
         "lng": 55.5,
-        "cca3": "REU"
+        "cca3": "REU",
     },
     {
         "name": "Indonesia",
@@ -1398,7 +1398,7 @@ countries = [
         "capital": "Jakarta",
         "lat": -5,
         "lng": 120,
-        "cca3": "IDN"
+        "cca3": "IDN",
     },
     {
         "name": "Vanuatu",
@@ -1408,7 +1408,7 @@ countries = [
         "capital": "Port Vila",
         "lat": -16,
         "lng": 167,
-        "cca3": "VUT"
+        "cca3": "VUT",
     },
     {
         "name": "Macedonia",
@@ -1418,7 +1418,7 @@ countries = [
         "capital": "Skopje",
         "lat": 41.83333333,
         "lng": 22,
-        "cca3": "MKD"
+        "cca3": "MKD",
     },
     {
         "name": "DR Congo",
@@ -1428,7 +1428,7 @@ countries = [
         "capital": "Kinshasa",
         "lat": 0,
         "lng": 25,
-        "cca3": "COD"
+        "cca3": "COD",
     },
     {
         "name": "Republic of the Congo",
@@ -1438,7 +1438,7 @@ countries = [
         "capital": "Brazzaville",
         "lat": -1,
         "lng": 15,
-        "cca3": "COG"
+        "cca3": "COG",
     },
     {
         "name": "Iceland",
@@ -1448,7 +1448,7 @@ countries = [
         "capital": "Reykjavik",
         "lat": 65,
         "lng": -18,
-        "cca3": "ISL"
+        "cca3": "ISL",
     },
     {
         "name": "Guadeloupe",
@@ -1458,7 +1458,7 @@ countries = [
         "capital": "Basse-Terre",
         "lat": 16.25,
         "lng": -61.583333,
-        "cca3": "GLP"
+        "cca3": "GLP",
     },
     {
         "name": "Cook Islands",
@@ -1468,7 +1468,7 @@ countries = [
         "capital": "Avarua",
         "lat": -21.23333333,
         "lng": -159.76666666,
-        "cca3": "COK"
+        "cca3": "COK",
     },
     {
         "name": "Comoros",
@@ -1478,7 +1478,7 @@ countries = [
         "capital": "Moroni",
         "lat": -12.16666666,
         "lng": 44.25,
-        "cca3": "COM"
+        "cca3": "COM",
     },
     {
         "name": "Colombia",
@@ -1488,7 +1488,7 @@ countries = [
         "capital": "Bogota",
         "lat": 4,
         "lng": -72,
-        "cca3": "COL"
+        "cca3": "COL",
     },
     {
         "name": "Nigeria",
@@ -1498,7 +1498,7 @@ countries = [
         "capital": "Abuja",
         "lat": 10,
         "lng": 8,
-        "cca3": "NGA"
+        "cca3": "NGA",
     },
     {
         "name": "Timor-Leste",
@@ -1508,7 +1508,7 @@ countries = [
         "capital": "Dili",
         "lat": -8.83333333,
         "lng": 125.91666666,
-        "cca3": "TLS"
+        "cca3": "TLS",
     },
     {
         "name": "Taiwan",
@@ -1518,7 +1518,7 @@ countries = [
         "capital": "Taipei",
         "lat": 23.5,
         "lng": 121,
-        "cca3": "TWN"
+        "cca3": "TWN",
     },
     {
         "name": "Portugal",
@@ -1528,7 +1528,7 @@ countries = [
         "capital": "Lisbon",
         "lat": 39.5,
         "lng": -8,
-        "cca3": "PRT"
+        "cca3": "PRT",
     },
     {
         "name": "Moldova",
@@ -1538,7 +1538,7 @@ countries = [
         "capital": "Chisinau",
         "lat": 47,
         "lng": 29,
-        "cca3": "MDA"
+        "cca3": "MDA",
     },
     {
         "name": "Guernsey",
@@ -1548,7 +1548,7 @@ countries = [
         "capital": "St. Peter Port",
         "lat": 49.46666666,
         "lng": -2.58333333,
-        "cca3": "GGY"
+        "cca3": "GGY",
     },
     {
         "name": "Madagascar",
@@ -1558,7 +1558,7 @@ countries = [
         "capital": "Antananarivo",
         "lat": -20,
         "lng": 47,
-        "cca3": "MDG"
+        "cca3": "MDG",
     },
     {
         "name": "Ecuador",
@@ -1568,7 +1568,7 @@ countries = [
         "capital": "Quito",
         "lat": -2,
         "lng": -77.5,
-        "cca3": "ECU"
+        "cca3": "ECU",
     },
     {
         "name": "Senegal",
@@ -1578,7 +1578,7 @@ countries = [
         "capital": "Dakar",
         "lat": 14,
         "lng": -14,
-        "cca3": "SEN"
+        "cca3": "SEN",
     },
     {
         "name": "New Zealand",
@@ -1588,7 +1588,7 @@ countries = [
         "capital": "Wellington",
         "lat": -41,
         "lng": 174,
-        "cca3": "NZL"
+        "cca3": "NZL",
     },
     {
         "name": "Maldives",
@@ -1598,7 +1598,7 @@ countries = [
         "capital": "Male",
         "lat": 3.25,
         "lng": 73,
-        "cca3": "MDV"
+        "cca3": "MDV",
     },
     {
         "name": "American Samoa",
@@ -1608,7 +1608,7 @@ countries = [
         "capital": "Pago Pago",
         "lat": -14.33333333,
         "lng": -170,
-        "cca3": "ASM"
+        "cca3": "ASM",
     },
     {
         "name": "Saint Pierre and Miquelon",
@@ -1618,7 +1618,7 @@ countries = [
         "capital": "Saint-Pierre",
         "lat": 46.83333333,
         "lng": -56.33333333,
-        "cca3": "SPM"
+        "cca3": "SPM",
     },
     {
         "name": "Curacao",
@@ -1628,7 +1628,7 @@ countries = [
         "capital": "Willemstad",
         "lat": 12.116667,
         "lng": -68.933333,
-        "cca3": "CUW"
+        "cca3": "CUW",
     },
     {
         "name": "France",
@@ -1638,7 +1638,7 @@ countries = [
         "capital": "Paris",
         "lat": 46,
         "lng": 2,
-        "cca3": "FRA"
+        "cca3": "FRA",
     },
     {
         "name": "Lithuania",
@@ -1648,7 +1648,7 @@ countries = [
         "capital": "Vilnius",
         "lat": 56,
         "lng": 24,
-        "cca3": "LTU"
+        "cca3": "LTU",
     },
     {
         "name": "Rwanda",
@@ -1658,7 +1658,7 @@ countries = [
         "capital": "Kigali",
         "lat": -2,
         "lng": 30,
-        "cca3": "RWA"
+        "cca3": "RWA",
     },
     {
         "name": "Zambia",
@@ -1668,7 +1668,7 @@ countries = [
         "capital": "Lusaka",
         "lat": -15,
         "lng": 30,
-        "cca3": "ZMB"
+        "cca3": "ZMB",
     },
     {
         "name": "Gambia",
@@ -1678,7 +1678,7 @@ countries = [
         "capital": "Banjul",
         "lat": 13.46666666,
         "lng": -16.56666666,
-        "cca3": "GMB"
+        "cca3": "GMB",
     },
     {
         "name": "Wallis and Futuna",
@@ -1688,7 +1688,7 @@ countries = [
         "capital": "Mata-Utu",
         "lat": -13.3,
         "lng": -176.2,
-        "cca3": "WLF"
+        "cca3": "WLF",
     },
     {
         "name": "Jersey",
@@ -1698,7 +1698,7 @@ countries = [
         "capital": "Saint Helier",
         "lat": 49.25,
         "lng": -2.16666666,
-        "cca3": "JEY"
+        "cca3": "JEY",
     },
     {
         "name": "Faroe Islands",
@@ -1708,7 +1708,7 @@ countries = [
         "capital": "Torshavn",
         "lat": 62,
         "lng": -7,
-        "cca3": "FRO"
+        "cca3": "FRO",
     },
     {
         "name": "Guatemala",
@@ -1718,7 +1718,7 @@ countries = [
         "capital": "Guatemala City",
         "lat": 15.5,
         "lng": -90.25,
-        "cca3": "GTM"
+        "cca3": "GTM",
     },
     {
         "name": "Denmark",
@@ -1728,7 +1728,7 @@ countries = [
         "capital": "Copenhagen",
         "lat": 56,
         "lng": 10,
-        "cca3": "DNK"
+        "cca3": "DNK",
     },
     {
         "name": "Isle of Man",
@@ -1738,7 +1738,7 @@ countries = [
         "capital": "Douglas",
         "lat": 54.25,
         "lng": -4.5,
-        "cca3": "IMN"
+        "cca3": "IMN",
     },
     {
         "name": "Australia",
@@ -1748,7 +1748,7 @@ countries = [
         "capital": "Canberra",
         "lat": -27,
         "lng": 133,
-        "cca3": "AUS"
+        "cca3": "AUS",
     },
     {
         "name": "Austria",
@@ -1758,7 +1758,7 @@ countries = [
         "capital": "Vienna",
         "lat": 47.33333333,
         "lng": 13.33333333,
-        "cca3": "AUT"
+        "cca3": "AUT",
     },
     {
         "name": "Svalbard and Jan Mayen",
@@ -1768,7 +1768,7 @@ countries = [
         "capital": "Longyearbyen",
         "lat": 78,
         "lng": 20,
-        "cca3": "SJM"
+        "cca3": "SJM",
     },
     {
         "name": "Venezuela",
@@ -1778,7 +1778,7 @@ countries = [
         "capital": "Caracas",
         "lat": 8,
         "lng": -66,
-        "cca3": "VEN"
+        "cca3": "VEN",
     },
     {
         "name": "Kosovo",
@@ -1788,7 +1788,7 @@ countries = [
         "capital": "Pristina",
         "lat": 42.666667,
         "lng": 21.166667,
-        "cca3": "UNK"
+        "cca3": "UNK",
     },
     {
         "name": "Palau",
@@ -1798,7 +1798,7 @@ countries = [
         "capital": "Ngerulmud",
         "lat": 7.5,
         "lng": 134.5,
-        "cca3": "PLW"
+        "cca3": "PLW",
     },
     {
         "name": "Kenya",
@@ -1808,7 +1808,7 @@ countries = [
         "capital": "Nairobi",
         "lat": 1,
         "lng": 38,
-        "cca3": "KEN"
+        "cca3": "KEN",
     },
     {
         "name": "Samoa",
@@ -1818,7 +1818,7 @@ countries = [
         "capital": "Apia",
         "lat": -13.58333333,
         "lng": -172.33333333,
-        "cca3": "WSM"
+        "cca3": "WSM",
     },
     {
         "name": "Turkey",
@@ -1828,7 +1828,7 @@ countries = [
         "capital": "Ankara",
         "lat": 39,
         "lng": 35,
-        "cca3": "TUR"
+        "cca3": "TUR",
     },
     {
         "name": "Albania",
@@ -1838,7 +1838,7 @@ countries = [
         "capital": "Tirana",
         "lat": 41,
         "lng": 20,
-        "cca3": "ALB"
+        "cca3": "ALB",
     },
     {
         "name": "Oman",
@@ -1848,7 +1848,7 @@ countries = [
         "capital": "Muscat",
         "lat": 21,
         "lng": 57,
-        "cca3": "OMN"
+        "cca3": "OMN",
     },
     {
         "name": "Tuvalu",
@@ -1858,7 +1858,7 @@ countries = [
         "capital": "Funafuti",
         "lat": -8,
         "lng": 178,
-        "cca3": "TUV"
+        "cca3": "TUV",
     },
     {
         "name": "Aland Islands",
@@ -1868,7 +1868,7 @@ countries = [
         "capital": "Mariehamn",
         "lat": 60.116667,
         "lng": 19.9,
-        "cca3": "ALA"
+        "cca3": "ALA",
     },
     {
         "name": "Brunei",
@@ -1878,7 +1878,7 @@ countries = [
         "capital": "Bandar Seri Begawan",
         "lat": 4.5,
         "lng": 114.66666666,
-        "cca3": "BRN"
+        "cca3": "BRN",
     },
     {
         "name": "Tunisia",
@@ -1888,7 +1888,7 @@ countries = [
         "capital": "Tunis",
         "lat": 34,
         "lng": 9,
-        "cca3": "TUN"
+        "cca3": "TUN",
     },
     {
         "name": "Pitcairn Islands",
@@ -1898,7 +1898,7 @@ countries = [
         "capital": "Adamstown",
         "lat": -25.06666666,
         "lng": -130.1,
-        "cca3": "PCN"
+        "cca3": "PCN",
     },
     {
         "name": "Barbados",
@@ -1908,7 +1908,7 @@ countries = [
         "capital": "Bridgetown",
         "lat": 13.16666666,
         "lng": -59.53333333,
-        "cca3": "BRB"
+        "cca3": "BRB",
     },
     {
         "name": "Brazil",
@@ -1918,7 +1918,7 @@ countries = [
         "capital": "Brasilia",
         "lat": -10,
         "lng": -55,
-        "cca3": "BRA"
+        "cca3": "BRA",
     },
     {
         "name": "Ivory Coast",
@@ -1928,7 +1928,7 @@ countries = [
         "capital": "Yamoussoukro",
         "lat": 8,
         "lng": -5,
-        "cca3": "CIV"
+        "cca3": "CIV",
     },
     {
         "name": "Serbia",
@@ -1938,7 +1938,7 @@ countries = [
         "capital": "Belgrade",
         "lat": 44,
         "lng": 21,
-        "cca3": "SRB"
+        "cca3": "SRB",
     },
     {
         "name": "Equatorial Guinea",
@@ -1948,7 +1948,7 @@ countries = [
         "capital": "Malabo",
         "lat": 2,
         "lng": 10,
-        "cca3": "GNQ"
+        "cca3": "GNQ",
     },
     {
         "name": "United States",
@@ -1958,7 +1958,7 @@ countries = [
         "capital": "Washington D.C.",
         "lat": 38,
         "lng": -97,
-        "cca3": "USA"
+        "cca3": "USA",
     },
     {
         "name": "Qatar",
@@ -1968,7 +1968,7 @@ countries = [
         "capital": "Doha",
         "lat": 25.5,
         "lng": 51.25,
-        "cca3": "QAT"
+        "cca3": "QAT",
     },
     {
         "name": "Sweden",
@@ -1978,7 +1978,7 @@ countries = [
         "capital": "Stockholm",
         "lat": 62,
         "lng": 15,
-        "cca3": "SWE"
+        "cca3": "SWE",
     },
     {
         "name": "Azerbaijan",
@@ -1988,7 +1988,7 @@ countries = [
         "capital": "Baku",
         "lat": 40.5,
         "lng": 47.5,
-        "cca3": "AZE"
+        "cca3": "AZE",
     },
     {
         "name": "Guinea-Bissau",
@@ -1998,7 +1998,7 @@ countries = [
         "capital": "Bissau",
         "lat": 12,
         "lng": -15,
-        "cca3": "GNB"
+        "cca3": "GNB",
     },
     {
         "name": "Swaziland",
@@ -2008,7 +2008,7 @@ countries = [
         "capital": "Lobamba",
         "lat": -26.5,
         "lng": 31.5,
-        "cca3": "SWZ"
+        "cca3": "SWZ",
     },
     {
         "name": "Tonga",
@@ -2018,7 +2018,7 @@ countries = [
         "capital": "Nuku'alofa",
         "lat": -20,
         "lng": -175,
-        "cca3": "TON"
+        "cca3": "TON",
     },
     {
         "name": "Canada",
@@ -2028,7 +2028,7 @@ countries = [
         "capital": "Ottawa",
         "lat": 60,
         "lng": -95,
-        "cca3": "CAN"
+        "cca3": "CAN",
     },
     {
         "name": "Ukraine",
@@ -2038,7 +2038,7 @@ countries = [
         "capital": "Kiev",
         "lat": 49,
         "lng": 32,
-        "cca3": "UKR"
+        "cca3": "UKR",
     },
     {
         "name": "South Korea",
@@ -2048,7 +2048,7 @@ countries = [
         "capital": "Seoul",
         "lat": 37,
         "lng": 127.5,
-        "cca3": "KOR"
+        "cca3": "KOR",
     },
     {
         "name": "Anguilla",
@@ -2058,7 +2058,7 @@ countries = [
         "capital": "The Valley",
         "lat": 18.25,
         "lng": -63.16666666,
-        "cca3": "AIA"
+        "cca3": "AIA",
     },
     {
         "name": "Central African Republic",
@@ -2068,7 +2068,7 @@ countries = [
         "capital": "Bangui",
         "lat": 7,
         "lng": 21,
-        "cca3": "CAF"
+        "cca3": "CAF",
     },
     {
         "name": "Slovakia",
@@ -2078,7 +2078,7 @@ countries = [
         "capital": "Bratislava",
         "lat": 48.66666666,
         "lng": 19.5,
-        "cca3": "SVK"
+        "cca3": "SVK",
     },
     {
         "name": "Cyprus",
@@ -2088,7 +2088,7 @@ countries = [
         "capital": "Nicosia",
         "lat": 35,
         "lng": 33,
-        "cca3": "CYP"
+        "cca3": "CYP",
     },
     {
         "name": "Bosnia and Herzegovina",
@@ -2098,7 +2098,7 @@ countries = [
         "capital": "Sarajevo",
         "lat": 44,
         "lng": 18,
-        "cca3": "BIH"
+        "cca3": "BIH",
     },
     {
         "name": "Singapore",
@@ -2108,7 +2108,7 @@ countries = [
         "capital": "Singapore",
         "lat": 1.36666666,
         "lng": 103.8,
-        "cca3": "SGP"
+        "cca3": "SGP",
     },
     {
         "name": "South Georgia",
@@ -2118,7 +2118,7 @@ countries = [
         "capital": "King Edward Point",
         "lat": -54.5,
         "lng": -37,
-        "cca3": "SGS"
+        "cca3": "SGS",
     },
     {
         "name": "Somalia",
@@ -2128,7 +2128,7 @@ countries = [
         "capital": "Mogadishu",
         "lat": 10,
         "lng": 49,
-        "cca3": "SOM"
+        "cca3": "SOM",
     },
     {
         "name": "Uzbekistan",
@@ -2138,7 +2138,7 @@ countries = [
         "capital": "Tashkent",
         "lat": 41,
         "lng": 64,
-        "cca3": "UZB"
+        "cca3": "UZB",
     },
     {
         "name": "Eritrea",
@@ -2148,7 +2148,7 @@ countries = [
         "capital": "Asmara",
         "lat": 15,
         "lng": 39,
-        "cca3": "ERI"
+        "cca3": "ERI",
     },
     {
         "name": "Poland",
@@ -2158,7 +2158,7 @@ countries = [
         "capital": "Warsaw",
         "lat": 52,
         "lng": 20,
-        "cca3": "POL"
+        "cca3": "POL",
     },
     {
         "name": "Kuwait",
@@ -2168,7 +2168,7 @@ countries = [
         "capital": "Kuwait City",
         "lat": 29.5,
         "lng": 45.75,
-        "cca3": "KWT"
+        "cca3": "KWT",
     },
     {
         "name": "Gabon",
@@ -2178,7 +2178,7 @@ countries = [
         "capital": "Libreville",
         "lat": -1,
         "lng": 11.75,
-        "cca3": "GAB"
+        "cca3": "GAB",
     },
     {
         "name": "Cayman Islands",
@@ -2188,7 +2188,7 @@ countries = [
         "capital": "George Town",
         "lat": 19.5,
         "lng": -80.5,
-        "cca3": "CYM"
+        "cca3": "CYM",
     },
     {
         "name": "Vatican City",
@@ -2198,7 +2198,7 @@ countries = [
         "capital": "Vatican City",
         "lat": 41.9,
         "lng": 12.45,
-        "cca3": "VAT"
+        "cca3": "VAT",
     },
     {
         "name": "Estonia",
@@ -2208,7 +2208,7 @@ countries = [
         "capital": "Tallinn",
         "lat": 59,
         "lng": 26,
-        "cca3": "EST"
+        "cca3": "EST",
     },
     {
         "name": "Malawi",
@@ -2218,7 +2218,7 @@ countries = [
         "capital": "Lilongwe",
         "lat": -13.5,
         "lng": 34,
-        "cca3": "MWI"
+        "cca3": "MWI",
     },
     {
         "name": "Spain",
@@ -2228,7 +2228,7 @@ countries = [
         "capital": "Madrid",
         "lat": 40,
         "lng": -4,
-        "cca3": "ESP"
+        "cca3": "ESP",
     },
     {
         "name": "Iraq",
@@ -2238,7 +2238,7 @@ countries = [
         "capital": "Baghdad",
         "lat": 33,
         "lng": 44,
-        "cca3": "IRQ"
+        "cca3": "IRQ",
     },
     {
         "name": "El Salvador",
@@ -2248,7 +2248,7 @@ countries = [
         "capital": "San Salvador",
         "lat": 13.83333333,
         "lng": -88.91666666,
-        "cca3": "SLV"
+        "cca3": "SLV",
     },
     {
         "name": "Mali",
@@ -2258,7 +2258,7 @@ countries = [
         "capital": "Bamako",
         "lat": 17,
         "lng": -4,
-        "cca3": "MLI"
+        "cca3": "MLI",
     },
     {
         "name": "Ireland",
@@ -2268,7 +2268,7 @@ countries = [
         "capital": "Dublin",
         "lat": 53,
         "lng": -8,
-        "cca3": "IRL"
+        "cca3": "IRL",
     },
     {
         "name": "Iran",
@@ -2278,7 +2278,7 @@ countries = [
         "capital": "Tehran",
         "lat": 32,
         "lng": 53,
-        "cca3": "IRN"
+        "cca3": "IRN",
     },
     {
         "name": "Aruba",
@@ -2288,7 +2288,7 @@ countries = [
         "capital": "Oranjestad",
         "lat": 12.5,
         "lng": -69.96666666,
-        "cca3": "ABW"
+        "cca3": "ABW",
     },
     {
         "name": "Papua New Guinea",
@@ -2298,7 +2298,7 @@ countries = [
         "capital": "Port Moresby",
         "lat": -6,
         "lng": 147,
-        "cca3": "PNG"
+        "cca3": "PNG",
     },
     {
         "name": "Panama",
@@ -2308,7 +2308,7 @@ countries = [
         "capital": "Panama City",
         "lat": 9,
         "lng": -80,
-        "cca3": "PAN"
+        "cca3": "PAN",
     },
     {
         "name": "Sudan",
@@ -2318,7 +2318,7 @@ countries = [
         "capital": "Khartoum",
         "lat": 15,
         "lng": 30,
-        "cca3": "SDN"
+        "cca3": "SDN",
     },
     {
         "name": "Solomon Islands",
@@ -2328,7 +2328,7 @@ countries = [
         "capital": "Honiara",
         "lat": -8,
         "lng": 159,
-        "cca3": "SLB"
+        "cca3": "SLB",
     },
     {
         "name": "Western Sahara",
@@ -2338,7 +2338,7 @@ countries = [
         "capital": "El Aaiun",
         "lat": 24.5,
         "lng": -13,
-        "cca3": "ESH"
+        "cca3": "ESH",
     },
     {
         "name": "Monaco",
@@ -2348,7 +2348,7 @@ countries = [
         "capital": "Monaco",
         "lat": 43.73333333,
         "lng": 7.4,
-        "cca3": "MCO"
+        "cca3": "MCO",
     },
     {
         "name": "Italy",
@@ -2358,7 +2358,7 @@ countries = [
         "capital": "Rome",
         "lat": 42.83333333,
         "lng": 12.83333333,
-        "cca3": "ITA"
+        "cca3": "ITA",
     },
     {
         "name": "Japan",
@@ -2368,7 +2368,7 @@ countries = [
         "capital": "Tokyo",
         "lat": 36,
         "lng": 138,
-        "cca3": "JPN"
+        "cca3": "JPN",
     },
     {
         "name": "Kyrgyzstan",
@@ -2378,7 +2378,7 @@ countries = [
         "capital": "Bishkek",
         "lat": 41,
         "lng": 75,
-        "cca3": "KGZ"
+        "cca3": "KGZ",
     },
     {
         "name": "Uganda",
@@ -2388,7 +2388,7 @@ countries = [
         "capital": "Kampala",
         "lat": 1,
         "lng": 32,
-        "cca3": "UGA"
+        "cca3": "UGA",
     },
     {
         "name": "New Caledonia",
@@ -2398,7 +2398,7 @@ countries = [
         "capital": "Noumea",
         "lat": -21.5,
         "lng": 165.5,
-        "cca3": "NCL"
+        "cca3": "NCL",
     },
     {
         "name": "United Arab Emirates",
@@ -2408,7 +2408,7 @@ countries = [
         "capital": "Abu Dhabi",
         "lat": 24,
         "lng": 54,
-        "cca3": "ARE"
+        "cca3": "ARE",
     },
     {
         "name": "Argentina",
@@ -2418,7 +2418,7 @@ countries = [
         "capital": "Buenos Aires",
         "lat": -34,
         "lng": -64,
-        "cca3": "ARG"
+        "cca3": "ARG",
     },
     {
         "name": "Bahamas",
@@ -2428,7 +2428,7 @@ countries = [
         "capital": "Nassau",
         "lat": 24.25,
         "lng": -76,
-        "cca3": "BHS"
+        "cca3": "BHS",
     },
     {
         "name": "Bahrain",
@@ -2438,7 +2438,7 @@ countries = [
         "capital": "Manama",
         "lat": 26,
         "lng": 50.55,
-        "cca3": "BHR"
+        "cca3": "BHR",
     },
     {
         "name": "Armenia",
@@ -2448,7 +2448,7 @@ countries = [
         "capital": "Yerevan",
         "lat": 40,
         "lng": 45,
-        "cca3": "ARM"
+        "cca3": "ARM",
     },
     {
         "name": "Nauru",
@@ -2458,7 +2458,7 @@ countries = [
         "capital": "Yaren",
         "lat": -0.53333333,
         "lng": 166.91666666,
-        "cca3": "NRU"
+        "cca3": "NRU",
     },
     {
         "name": "Cuba",
@@ -2468,8 +2468,8 @@ countries = [
         "capital": "Havana",
         "lat": 21.5,
         "lng": -80,
-        "cca3": "CUB"
-    }
+        "cca3": "CUB",
+    },
 ]
 
 all_lookups = {}
diff --git a/superset/data/country_map.py b/superset/data/country_map.py
index 6b32abe..2c34cfd 100644
--- a/superset/data/country_map.py
+++ b/superset/data/country_map.py
@@ -42,7 +42,8 @@ def load_country_map_data():
             '2014': BigInteger,
             'dttm': Date(),
         },
-        index=False)
+        index=False,
+    )
     print('Done loading table!')
     print('-' * 80)
     print('Creating table reference')
diff --git a/superset/data/css_templates.py b/superset/data/css_templates.py
index a991736..73112ae 100644
--- a/superset/data/css_templates.py
+++ b/superset/data/css_templates.py
@@ -11,7 +11,8 @@ def load_css_templates():
     obj = db.session.query(CssTemplate).filter_by(template_name='Flat').first()
     if not obj:
         obj = CssTemplate(template_name='Flat')
-    css = textwrap.dedent("""\
+    css = textwrap.dedent(
+        """\
     .gridster div.widget {
         transition: background-color 0.5s ease;
         background-color: #FAFAFA;
@@ -42,16 +43,17 @@ def load_css_templates():
         '#ff3339', '#ff1ab1', '#005c66', '#00b3a5', '#55d12e', '#b37e00', '#988b4e',
      ];
     */
-    """)
+    """
+    )
     obj.css = css
     db.session.merge(obj)
     db.session.commit()
 
-    obj = (
-        db.session.query(CssTemplate).filter_by(template_name='Courier Black').first())
+    obj = db.session.query(CssTemplate).filter_by(template_name='Courier Black').first()
     if not obj:
         obj = CssTemplate(template_name='Courier Black')
-    css = textwrap.dedent("""\
+    css = textwrap.dedent(
+        """\
     .gridster div.widget {
         transition: background-color 0.5s ease;
         background-color: #EEE;
@@ -97,7 +99,8 @@ def load_css_templates():
         '#ff3339', '#ff1ab1', '#005c66', '#00b3a5', '#55d12e', '#b37e00', '#988b4e',
      ];
     */
-    """)
+    """
+    )
     obj.css = css
     db.session.merge(obj)
     db.session.commit()
diff --git a/superset/data/deck.py b/superset/data/deck.py
index 3307e9e..8f0d29d 100644
--- a/superset/data/deck.py
+++ b/superset/data/deck.py
@@ -2,21 +2,9 @@
 import json
 
 from superset import db
-from .helpers import (
-    Dash,
-    get_slice_json,
-    merge_slice,
-    Slice,
-    TBL,
-    update_slice_ids,
-)
+from .helpers import Dash, get_slice_json, merge_slice, Slice, TBL, update_slice_ids
 
-COLOR_RED = {
-    'r': 205,
-    'g': 0,
-    'b': 3,
-    'a': 0.82,
-}
+COLOR_RED = {'r': 205, 'g': 0, 'b': 3, 'a': 0.82}
 POSITION_JSON = """\
 {
     "CHART-3afd9d70": {
@@ -165,11 +153,7 @@ def load_deck_dash():
     slices = []
     tbl = db.session.query(TBL).filter_by(table_name='long_lat').first()
     slice_data = {
-        'spatial': {
-            'type': 'latlong',
-            'lonCol': 'LON',
-            'latCol': 'LAT',
-        },
+        'spatial': {'type': 'latlong', 'lonCol': 'LON', 'latCol': 'LAT'},
         'color_picker': COLOR_RED,
         'datasource': '5__table',
         'filters': [],
@@ -211,23 +195,14 @@ def load_deck_dash():
         'point_unit': 'square_m',
         'filters': [],
         'row_limit': 5000,
-        'spatial': {
-            'type': 'latlong',
-            'lonCol': 'LON',
-            'latCol': 'LAT',
-        },
+        'spatial': {'type': 'latlong', 'lonCol': 'LON', 'latCol': 'LAT'},
         'mapbox_style': 'mapbox://styles/mapbox/dark-v9',
         'granularity_sqla': None,
         'size': 'count',
         'viz_type': 'deck_screengrid',
         'time_range': 'No filter',
         'point_radius': 'Auto',
-        'color_picker': {
-            'a': 1,
-            'r': 14,
-            'b': 0,
-            'g': 255,
-        },
+        'color_picker': {'a': 1, 'r': 14, 'b': 0, 'g': 255},
         'grid_size': 20,
         'where': '',
         'having': '',
@@ -255,11 +230,7 @@ def load_deck_dash():
     slices.append(slc)
 
     slice_data = {
-        'spatial': {
-            'type': 'latlong',
-            'lonCol': 'LON',
-            'latCol': 'LAT',
-        },
+        'spatial': {'type': 'latlong', 'lonCol': 'LON', 'latCol': 'LAT'},
         'filters': [],
         'row_limit': 5000,
         'mapbox_style': 'mapbox://styles/mapbox/streets-v9',
@@ -269,12 +240,7 @@ def load_deck_dash():
         'time_range': 'No filter',
         'point_radius_unit': 'Pixels',
         'point_radius': 'Auto',
-        'color_picker': {
-            'a': 1,
-            'r': 14,
-            'b': 0,
-            'g': 255,
-        },
+        'color_picker': {'a': 1, 'r': 14, 'b': 0, 'g': 255},
         'grid_size': 40,
         'extruded': True,
         'having': '',
@@ -303,11 +269,7 @@ def load_deck_dash():
     slices.append(slc)
 
     slice_data = {
-        'spatial': {
-            'type': 'latlong',
-            'lonCol': 'LON',
-            'latCol': 'LAT',
-        },
+        'spatial': {'type': 'latlong', 'lonCol': 'LON', 'latCol': 'LAT'},
         'filters': [],
         'row_limit': 5000,
         'mapbox_style': 'mapbox://styles/mapbox/satellite-streets-v9',
@@ -317,12 +279,7 @@ def load_deck_dash():
         'point_radius_unit': 'Pixels',
         'point_radius': 'Auto',
         'time_range': 'No filter',
-        'color_picker': {
-            'a': 1,
-            'r': 14,
-            'b': 0,
-            'g': 255,
-        },
+        'color_picker': {'a': 1, 'r': 14, 'b': 0, 'g': 255},
         'grid_size': 120,
         'extruded': True,
         'having': '',
@@ -350,8 +307,9 @@ def load_deck_dash():
     merge_slice(slc)
     slices.append(slc)
 
-    polygon_tbl = db.session.query(TBL) \
-                    .filter_by(table_name='sf_population_polygons').first()
+    polygon_tbl = (
+        db.session.query(TBL).filter_by(table_name='sf_population_polygons').first()
+    )
     slice_data = {
         'datasource': '11__table',
         'viz_type': 'deck_polygon',
@@ -380,28 +338,14 @@ def load_deck_dash():
             'minLatitude': -85.05113,
         },
         'reverse_long_lat': False,
-        'fill_color_picker': {
-            'r': 3,
-            'g': 65,
-            'b': 73,
-            'a': 1,
-        },
-        'stroke_color_picker': {
-            'r': 0,
-            'g': 122,
-            'b': 135,
-            'a': 1,
-        },
+        'fill_color_picker': {'r': 3, 'g': 65, 'b': 73, 'a': 1},
+        'stroke_color_picker': {'r': 0, 'g': 122, 'b': 135, 'a': 1},
         'filled': True,
         'stroked': False,
         'extruded': True,
         'point_radius_scale': 100,
-        'js_columns': [
-            'population',
-            'area',
-        ],
-        'js_datapoint_mutator':
-            '(d) => {\n    d.elevation = d.extraProps.population/d.extraProps.area/10\n \
+        'js_columns': ['population', 'area'],
+        'js_datapoint_mutator': '(d) => {\n    d.elevation = d.extraProps.population/d.extraProps.area/10\n \
          d.fillColor = [d.extraProps.population/d.extraProps.area/60,140,0]\n \
          return d;\n}',
         'js_tooltip': '',
@@ -457,12 +401,7 @@ def load_deck_dash():
             'width': 997,
             'zoom': 2.929837070560775,
         },
-        'color_picker': {
-            'r': 0,
-            'g': 122,
-            'b': 135,
-            'a': 1,
-        },
+        'color_picker': {'r': 0, 'g': 122, 'b': 135, 'a': 1},
         'stroke_width': 1,
         'where': '',
         'having': '',
@@ -506,17 +445,10 @@ def load_deck_dash():
             'maxLatitude': 85.05113,
             'minLatitude': -85.05113,
         },
-        'color_picker': {
-            'r': 0,
-            'g': 122,
-            'b': 135,
-            'a': 1,
-        },
+        'color_picker': {'r': 0, 'g': 122, 'b': 135, 'a': 1},
         'line_width': 150,
         'reverse_long_lat': False,
-        'js_columns': [
-            'color',
-        ],
+        'js_columns': ['color'],
         'js_datapoint_mutator': 'd => {\n    return {\n        ...d,\n        color: \
             colors.hexToRGB(d.extraProps.color),\n    }\n}',
         'js_tooltip': '',
@@ -531,7 +463,10 @@ def load_deck_dash():
         slice_name='Path',
         viz_type='deck_path',
         datasource_type='table',
-        datasource_id=db.session.query(TBL).filter_by(table_name='bart_lines').first().id,
+        datasource_id=db.session.query(TBL)
+        .filter_by(table_name='bart_lines')
+        .first()
+        .id,
         params=get_slice_json(slice_data),
     )
     merge_slice(slc)
diff --git a/superset/data/energy.py b/superset/data/energy.py
index bbb4fba..8019b80 100644
--- a/superset/data/energy.py
+++ b/superset/data/energy.py
@@ -22,12 +22,9 @@ def load_energy():
         db.engine,
         if_exists='replace',
         chunksize=500,
-        dtype={
-            'source': String(255),
-            'target': String(255),
-            'value': Float(),
-        },
-        index=False)
+        dtype={'source': String(255), 'target': String(255), 'value': Float()},
+        index=False,
+    )
 
     print('Creating table [wb_health_population] reference')
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
@@ -44,7 +41,8 @@ def load_energy():
         viz_type='sankey',
         datasource_type='table',
         datasource_id=tbl.id,
-        params=textwrap.dedent("""\
+        params=textwrap.dedent(
+            """\
         {
             "collapsed_fieldsets": "",
             "groupby": [
@@ -58,7 +56,8 @@ def load_energy():
             "viz_type": "sankey",
             "where": ""
         }
-        """),
+        """
+        ),
     )
     misc_dash_slices.add(slc.slice_name)
     merge_slice(slc)
@@ -68,7 +67,8 @@ def load_energy():
         viz_type='directed_force',
         datasource_type='table',
         datasource_id=tbl.id,
-        params=textwrap.dedent("""\
+        params=textwrap.dedent(
+            """\
         {
             "charge": "-500",
             "collapsed_fieldsets": "",
@@ -84,7 +84,8 @@ def load_energy():
             "viz_type": "directed_force",
             "where": ""
         }
-        """),
+        """
+        ),
     )
     misc_dash_slices.add(slc.slice_name)
     merge_slice(slc)
@@ -94,7 +95,8 @@ def load_energy():
         viz_type='heatmap',
         datasource_type='table',
         datasource_id=tbl.id,
-        params=textwrap.dedent("""\
+        params=textwrap.dedent(
+            """\
         {
             "all_columns_x": "source",
             "all_columns_y": "target",
@@ -110,7 +112,8 @@ def load_energy():
             "xscale_interval": "1",
             "yscale_interval": "1"
         }
-        """),
+        """
+        ),
     )
     misc_dash_slices.add(slc.slice_name)
     merge_slice(slc)
diff --git a/superset/data/flights.py b/superset/data/flights.py
index 986d75b..a741827 100644
--- a/superset/data/flights.py
+++ b/superset/data/flights.py
@@ -33,10 +33,9 @@ def load_flights():
         db.engine,
         if_exists='replace',
         chunksize=500,
-        dtype={
-            'ds': DateTime,
-        },
-        index=False)
+        dtype={'ds': DateTime},
+        index=False,
+    )
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
     if not tbl:
         tbl = TBL(table_name=tbl_name)
diff --git a/superset/data/helpers.py b/superset/data/helpers.py
index d1ecccb..8768834 100644
--- a/superset/data/helpers.py
+++ b/superset/data/helpers.py
@@ -23,7 +23,8 @@ misc_dash_slices = set()  # slices assembled in a 'Misc Chart' dashboard
 
 def update_slice_ids(layout_dict, slices):
     charts = [
-        component for component in layout_dict.values()
+        component
+        for component in layout_dict.values()
         if isinstance(component, dict) and component['type'] == 'CHART'
     ]
     sorted_charts = sorted(charts, key=lambda k: k['meta']['chartId'])
diff --git a/superset/data/long_lat.py b/superset/data/long_lat.py
index c82452f..6fc05dc 100644
--- a/superset/data/long_lat.py
+++ b/superset/data/long_lat.py
@@ -23,16 +23,14 @@ def load_long_lat_data():
     """Loading lat/long data from a csv file in the repo"""
     with gzip.open(os.path.join(DATA_FOLDER, 'san_francisco.csv.gz')) as f:
         pdf = pd.read_csv(f, encoding='utf-8')
-    start = datetime.datetime.now().replace(
-        hour=0, minute=0, second=0, microsecond=0)
+    start = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
     pdf['datetime'] = [
         start + datetime.timedelta(hours=i * 24 / (len(pdf) - 1))
         for i in range(len(pdf))
     ]
     pdf['occupancy'] = [random.randint(1, 6) for _ in range(len(pdf))]
     pdf['radius_miles'] = [random.uniform(1, 3) for _ in range(len(pdf))]
-    pdf['geohash'] = pdf[['LAT', 'LON']].apply(
-        lambda x: geohash.encode(*x), axis=1)
+    pdf['geohash'] = pdf[['LAT', 'LON']].apply(lambda x: geohash.encode(*x), axis=1)
     pdf['delimited'] = pdf['LAT'].map(str).str.cat(pdf['LON'].map(str), sep=',')
     pdf.to_sql(  # pylint: disable=no-member
         'long_lat',
@@ -56,7 +54,8 @@ def load_long_lat_data():
             'geohash': String(12),
             'delimited': String(60),
         },
-        index=False)
+        index=False,
+    )
     print('Done loading table!')
     print('-' * 80)
 
diff --git a/superset/data/misc_dashboard.py b/superset/data/misc_dashboard.py
index 3e29abe..74db6d7 100644
--- a/superset/data/misc_dashboard.py
+++ b/superset/data/misc_dashboard.py
@@ -3,12 +3,7 @@ import textwrap
 
 
 from superset import db
-from .helpers import (
-    Dash,
-    misc_dash_slices,
-    Slice,
-    update_slice_ids,
-)
+from .helpers import Dash, misc_dash_slices, Slice, update_slice_ids
 
 DASH_SLUG = 'misc_charts'
 
@@ -22,7 +17,8 @@ def load_misc_dashboard():
 
     if not dash:
         dash = Dash()
-    js = textwrap.dedent("""\
+    js = textwrap.dedent(
+        """\
 {
     "CHART-BkeVbh8ANQ": {
         "children": [],
@@ -194,13 +190,11 @@ def load_misc_dashboard():
     },
     "DASHBOARD_VERSION_KEY": "v2"
 }
-    """)
+    """
+    )
     pos = json.loads(js)
     slices = (
-        db.session
-        .query(Slice)
-        .filter(Slice.slice_name.in_(misc_dash_slices))
-        .all()
+        db.session.query(Slice).filter(Slice.slice_name.in_(misc_dash_slices)).all()
     )
     slices = sorted(slices, key=lambda x: x.id)
     update_slice_ids(pos, slices)
diff --git a/superset/data/multi_line.py b/superset/data/multi_line.py
index 15468ec..56cd846 100644
--- a/superset/data/multi_line.py
+++ b/superset/data/multi_line.py
@@ -2,11 +2,7 @@ import json
 
 from superset import db
 from .birth_names import load_birth_names
-from .helpers import (
-    merge_slice,
-    misc_dash_slices,
-    Slice,
-)
+from .helpers import merge_slice, misc_dash_slices, Slice
 from .world_bank import load_world_bank_health_n_pop
 
 
@@ -14,24 +10,27 @@ def load_multi_line():
     load_world_bank_health_n_pop()
     load_birth_names()
     ids = [
-        row.id for row in
-        db.session.query(Slice).filter(
-            Slice.slice_name.in_(['Growth Rate', 'Trends']))
+        row.id
+        for row in db.session.query(Slice).filter(
+            Slice.slice_name.in_(['Growth Rate', 'Trends'])
+        )
     ]
 
     slc = Slice(
         datasource_type='table',  # not true, but needed
-        datasource_id=1,          # cannot be empty
+        datasource_id=1,  # cannot be empty
         slice_name='Multi Line',
         viz_type='line_multi',
-        params=json.dumps({
-            'slice_name': 'Multi Line',
-            'viz_type': 'line_multi',
-            'line_charts': [ids[0]],
-            'line_charts_2': [ids[1]],
-            'since': '1960-01-01',
-            'prefix_metric_with_slice_name': True,
-        }),
+        params=json.dumps(
+            {
+                'slice_name': 'Multi Line',
+                'viz_type': 'line_multi',
+                'line_charts': [ids[0]],
+                'line_charts_2': [ids[1]],
+                'since': '1960-01-01',
+                'prefix_metric_with_slice_name': True,
+            }
+        ),
     )
 
     misc_dash_slices.add(slc.slice_name)
diff --git a/superset/data/multiformat_time_series.py b/superset/data/multiformat_time_series.py
index 4e5cf6b..cd42cf9 100644
--- a/superset/data/multiformat_time_series.py
+++ b/superset/data/multiformat_time_series.py
@@ -38,7 +38,8 @@ def load_multiformat_time_series():
             'string2': String(100),
             'string3': String(100),
         },
-        index=False)
+        index=False,
+    )
     print('Done loading table!')
     print('-' * 80)
     print('Creating table [multiformat_time_series] reference')
diff --git a/superset/data/paris.py b/superset/data/paris.py
index a8934d9..052e577 100644
--- a/superset/data/paris.py
+++ b/superset/data/paris.py
@@ -28,7 +28,8 @@ def load_paris_iris_geojson():
             'features': Text,
             'type': Text,
         },
-        index=False)
+        index=False,
+    )
     print('Creating table {} reference'.format(tbl_name))
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
     if not tbl:
diff --git a/superset/data/random_time_series.py b/superset/data/random_time_series.py
index 297490b..290d841 100644
--- a/superset/data/random_time_series.py
+++ b/superset/data/random_time_series.py
@@ -6,14 +6,7 @@ from sqlalchemy import DateTime
 
 from superset import db
 from superset.utils import core as utils
-from .helpers import (
-    config,
-    DATA_FOLDER,
-    get_slice_json,
-    merge_slice,
-    Slice,
-    TBL,
-)
+from .helpers import config, DATA_FOLDER, get_slice_json, merge_slice, Slice, TBL
 
 
 def load_random_time_series_data():
@@ -26,10 +19,9 @@ def load_random_time_series_data():
         db.engine,
         if_exists='replace',
         chunksize=500,
-        dtype={
-            'ds': DateTime,
-        },
-        index=False)
+        dtype={'ds': DateTime},
+        index=False,
+    )
     print('Done loading table!')
     print('-' * 80)
 
diff --git a/superset/data/sf_population_polygons.py b/superset/data/sf_population_polygons.py
index abd6fef..e38d473 100644
--- a/superset/data/sf_population_polygons.py
+++ b/superset/data/sf_population_polygons.py
@@ -28,7 +28,8 @@ def load_sf_population_polygons():
             'contour': Text,
             'area': BigInteger,
         },
-        index=False)
+        index=False,
+    )
     print('Creating table {} reference'.format(tbl_name))
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
     if not tbl:
diff --git a/superset/data/unicode_test_data.py b/superset/data/unicode_test_data.py
index 561b9e0..cd084d4 100644
--- a/superset/data/unicode_test_data.py
+++ b/superset/data/unicode_test_data.py
@@ -22,8 +22,9 @@ from .helpers import (
 
 def load_unicode_test_data():
     """Loading unicode test dataset from a csv file in the repo"""
-    df = pd.read_csv(os.path.join(DATA_FOLDER, 'unicode_utf8_unixnl_test.csv'),
-                     encoding='utf-8')
+    df = pd.read_csv(
+        os.path.join(DATA_FOLDER, 'unicode_utf8_unixnl_test.csv'), encoding='utf-8'
+    )
     # generate date/numeric data
     df['dttm'] = datetime.datetime.now().date()
     df['value'] = [random.randint(1, 100) for _ in range(len(df))]
@@ -39,7 +40,8 @@ def load_unicode_test_data():
             'dttm': Date(),
             'value': Float(),
         },
-        index=False)
+        index=False,
+    )
     print('Done loading table!')
     print('-' * 80)
 
@@ -81,11 +83,7 @@ def load_unicode_test_data():
     merge_slice(slc)
 
     print('Creating a dashboard')
-    dash = (
-        db.session.query(Dash)
-        .filter_by(dashboard_title='Unicode Test')
-        .first()
-    )
+    dash = db.session.query(Dash).filter_by(dashboard_title='Unicode Test').first()
 
     if not dash:
         dash = Dash()
diff --git a/superset/data/world_bank.py b/superset/data/world_bank.py
index b75a079..c289158 100644
--- a/superset/data/world_bank.py
+++ b/superset/data/world_bank.py
@@ -41,7 +41,8 @@ def load_world_bank_health_n_pop():
             'country_name': String(255),
             'region': String(255),
         },
-        index=False)
+        index=False,
+    )
 
     print('Creating table [wb_health_population] reference')
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
@@ -86,7 +87,9 @@ def load_world_bank_health_n_pop():
                 defaults,
                 viz_type='filter_box',
                 date_filter=False,
-                groupby=['region', 'country_name'])),
+                groupby=['region', 'country_name'],
+            ),
+        ),
         Slice(
             slice_name="World's Population",
             viz_type='big_number',
@@ -98,7 +101,9 @@ def load_world_bank_health_n_pop():
                 viz_type='big_number',
                 compare_lag='10',
                 metric='sum__SP_POP_TOTL',
-                compare_suffix='over 10Y')),
+                compare_suffix='over 10Y',
+            ),
+        ),
         Slice(
             slice_name='Most Populated Countries',
             viz_type='table',
@@ -108,7 +113,9 @@ def load_world_bank_health_n_pop():
                 defaults,
                 viz_type='table',
                 metrics=['sum__SP_POP_TOTL'],
-                groupby=['country_name'])),
+                groupby=['country_name'],
+            ),
+        ),
         Slice(
             slice_name='Growth Rate',
             viz_type='line',
@@ -120,7 +127,9 @@ def load_world_bank_health_n_pop():
                 since='1960-01-01',
                 metrics=['sum__SP_POP_TOTL'],
                 num_period_compare='10',
-                groupby=['country_name'])),
+                groupby=['country_name'],
+            ),
+        ),
         Slice(
             slice_name='% Rural',
             viz_type='world_map',
@@ -130,7 +139,9 @@ def load_world_bank_health_n_pop():
                 defaults,
                 viz_type='world_map',
                 metric='sum__SP_RUR_TOTL_ZS',
-                num_period_compare='10')),
+                num_period_compare='10',
+            ),
+        ),
         Slice(
             slice_name='Life Expectancy VS Rural %',
             viz_type='bubble',
@@ -148,14 +159,30 @@ def load_world_bank_health_n_pop():
                 y='sum__SP_DYN_LE00_IN',
                 size='sum__SP_POP_TOTL',
                 max_bubble_size='50',
-                filters=[{
-                    'col': 'country_code',
-                    'val': [
-                        'TCA', 'MNP', 'DMA', 'MHL', 'MCO', 'SXM', 'CYM',
-                        'TUV', 'IMY', 'KNA', 'ASM', 'ADO', 'AMA', 'PLW',
-                    ],
-                    'op': 'not in'}],
-            )),
+                filters=[
+                    {
+                        'col': 'country_code',
+                        'val': [
+                            'TCA',
+                            'MNP',
+                            'DMA',
+                            'MHL',
+                            'MCO',
+                            'SXM',
+                            'CYM',
+                            'TUV',
+                            'IMY',
+                            'KNA',
+                            'ASM',
+                            'ADO',
+                            'AMA',
+                            'PLW',
+                        ],
+                        'op': 'not in',
+                    }
+                ],
+            ),
+        ),
         Slice(
             slice_name='Rural Breakdown',
             viz_type='sunburst',
@@ -167,7 +194,9 @@ def load_world_bank_health_n_pop():
                 groupby=['region', 'country_name'],
                 secondary_metric='sum__SP_RUR_TOTL',
                 since='2011-01-01',
-                until='2011-01-01')),
+                until='2011-01-01',
+            ),
+        ),
         Slice(
             slice_name="World's Pop Growth",
             viz_type='area',
@@ -178,7 +207,9 @@ def load_world_bank_health_n_pop():
                 since='1960-01-01',
                 until='now',
                 viz_type='area',
-                groupby=['region'])),
+                groupby=['region'],
+            ),
+        ),
         Slice(
             slice_name='Box plot',
             viz_type='box_plot',
@@ -190,7 +221,9 @@ def load_world_bank_health_n_pop():
                 until='now',
                 whisker_options='Min/max (no outliers)',
                 viz_type='box_plot',
-                groupby=['region'])),
+                groupby=['region'],
+            ),
+        ),
         Slice(
             slice_name='Treemap',
             viz_type='treemap',
@@ -202,7 +235,9 @@ def load_world_bank_health_n_pop():
                 until='now',
                 viz_type='treemap',
                 metrics=['sum__SP_POP_TOTL'],
-                groupby=['region', 'country_code'])),
+                groupby=['region', 'country_code'],
+            ),
+        ),
         Slice(
             slice_name='Parallel Coordinates',
             viz_type='para',
@@ -214,12 +249,11 @@ def load_world_bank_health_n_pop():
                 until='2011-01-01',
                 viz_type='para',
                 limit=100,
-                metrics=[
-                    'sum__SP_POP_TOTL',
-                    'sum__SP_RUR_TOTL_ZS',
-                    'sum__SH_DYN_AIDS'],
+                metrics=['sum__SP_POP_TOTL', 'sum__SP_RUR_TOTL_ZS', 'sum__SH_DYN_AIDS'],
                 secondary_metric='sum__SP_POP_TOTL',
-                series='country_name')),
+                series='country_name',
+            ),
+        ),
     ]
     misc_dash_slices.add(slices[-1].slice_name)
     for slc in slices:
@@ -232,7 +266,8 @@ def load_world_bank_health_n_pop():
 
     if not dash:
         dash = Dash()
-    js = textwrap.dedent("""\
+    js = textwrap.dedent(
+        """\
 {
     "CHART-36bfc934": {
         "children": [],
@@ -448,7 +483,8 @@ def load_world_bank_health_n_pop():
     },
     "DASHBOARD_VERSION_KEY": "v2"
 }
-    """)
+    """
+    )
     pos = json.loads(js)
     update_slice_ids(pos, slices)
 
diff --git a/superset/dataframe.py b/superset/dataframe.py
index d410d24..00c197b 100644
--- a/superset/dataframe.py
+++ b/superset/dataframe.py
@@ -59,7 +59,7 @@ class SupersetDataFrame(object):
         'O': 'OBJECT',  # (Python) objects
         'S': 'BYTE',  # (byte-)string
         'U': 'STRING',  # Unicode
-        'V': None,   # raw data (void)
+        'V': None,  # raw data (void)
     }
 
     def __init__(self, data, cursor_description, db_engine_spec):
@@ -70,8 +70,7 @@ class SupersetDataFrame(object):
         self.column_names = dedup(column_names)
 
         data = data or []
-        self.df = (
-            pd.DataFrame(list(data), columns=self.column_names).infer_objects())
+        self.df = pd.DataFrame(list(data), columns=self.column_names).infer_objects()
 
         self._type_dict = {}
         try:
@@ -91,9 +90,13 @@ class SupersetDataFrame(object):
     @property
     def data(self):
         # work around for https://github.com/pandas-dev/pandas/issues/18372
-        data = [dict((k, _maybe_box_datetimelike(v))
-                for k, v in zip(self.df.columns, np.atleast_1d(row)))
-                for row in self.df.values]
+        data = [
+            dict(
+                (k, _maybe_box_datetimelike(v))
+                for k, v in zip(self.df.columns, np.atleast_1d(row))
+            )
+            for row in self.df.values
+        ]
         for d in data:
             for k, v in list(d.items()):
                 # if an int is too big for Java Script to handle
@@ -126,7 +129,6 @@ class SupersetDataFrame(object):
 
     @staticmethod
     def is_date(np_dtype, db_type_str):
-
         def looks_daty(s):
             if isinstance(s, basestring):
                 return any([s.lower().startswith(ss) for ss in ('time', 'date')])
@@ -153,8 +155,11 @@ class SupersetDataFrame(object):
         # consider checking for key substring too.
         if cls.is_id(column_name):
             return 'count_distinct'
-        if (hasattr(dtype, 'type') and issubclass(dtype.type, np.generic) and
-                np.issubdtype(dtype, np.number)):
+        if (
+            hasattr(dtype, 'type')
+            and issubclass(dtype.type, np.generic)
+            and np.issubdtype(dtype, np.number)
+        ):
             return 'sum'
         return None
 
@@ -173,10 +178,7 @@ class SupersetDataFrame(object):
         if sample_size:
             sample = self.df.sample(sample_size)
         for col in self.df.dtypes.keys():
-            db_type_str = (
-                self._type_dict.get(col) or
-                self.db_type(self.df.dtypes[col])
-            )
+            db_type_str = self._type_dict.get(col) or self.db_type(self.df.dtypes[col])
             column = {
                 'name': col,
                 'agg': self.agg_func(self.df.dtypes[col], col),
@@ -199,14 +201,11 @@ class SupersetDataFrame(object):
                     column['is_dim'] = False
                 # check if encoded datetime
                 if (
-                        column['type'] == 'STRING' and
-                        self.datetime_conversion_rate(sample[col]) >
-                        INFER_COL_TYPES_THRESHOLD):
-                    column.update({
-                        'is_date': True,
-                        'is_dim': False,
-                        'agg': None,
-                    })
+                    column['type'] == 'STRING'
+                    and self.datetime_conversion_rate(sample[col])
+                    > INFER_COL_TYPES_THRESHOLD
+                ):
+                    column.update({'is_date': True, 'is_dim': False, 'agg': None})
             # 'agg' is optional attribute
             if not column['agg']:
                 column.pop('agg', None)
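
The dataframe.py hunks also show how compound conditions are re-wrapped: each operand moves to its own line with the `and` leading it, all inside one pair of parentheses, so the logic is untouched. A small self-contained check of the same condition `agg_func` uses above, run against an assumed sample dtype rather than Superset's own data:

    import numpy as np

    dtype = np.dtype('int64')
    if (
        hasattr(dtype, 'type')
        and issubclass(dtype.type, np.generic)
        and np.issubdtype(dtype, np.number)
    ):
        agg = 'sum'  # numeric columns default to a sum aggregate
    assert agg == 'sum'
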
diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py
index d409c87..9bf6568 100644
--- a/superset/db_engine_specs.py
+++ b/superset/db_engine_specs.py
@@ -80,6 +80,7 @@ def _create_time_grains_tuple(time_grains, time_grain_functions, blacklist):
 
 class LimitMethod(object):
     """Enum the ways that limits can be applied"""
+
     FETCH_MANY = 'fetch_many'
     WRAP_SQL = 'wrap_sql'
     FORCE_LIMIT = 'force_limit'
@@ -142,9 +143,7 @@ class BaseEngineSpec(object):
             sql = sql.strip('\t\n ;')
             qry = (
                 select('*')
-                .select_from(
-                    TextAsFrom(text(sql), ['*']).alias('inner_qry'),
-                )
+                .select_from(TextAsFrom(text(sql), ['*']).alias('inner_qry'))
                 .limit(limit)
             )
             return database.compile_sqla_query(qry)
@@ -165,8 +164,9 @@ class BaseEngineSpec(object):
 
     @staticmethod
     def csv_to_df(**kwargs):
-        kwargs['filepath_or_buffer'] = \
+        kwargs['filepath_or_buffer'] = (
             config['UPLOAD_FOLDER'] + kwargs['filepath_or_buffer']
+        )
         kwargs['encoding'] = 'utf-8'
         kwargs['iterator'] = True
         chunks = pandas.read_csv(**kwargs)
@@ -235,23 +235,28 @@ class BaseEngineSpec(object):
         Empty schema corresponds to the list of full names of the all
         tables or views: <schema>.<result_set_name>.
         """
-        schemas = db.all_schema_names(cache=db.schema_cache_enabled,
-                                      cache_timeout=db.schema_cache_timeout,
-                                      force=True)
+        schemas = db.all_schema_names(
+            cache=db.schema_cache_enabled,
+            cache_timeout=db.schema_cache_timeout,
+            force=True,
+        )
         all_result_sets = []
         for schema in schemas:
             if datasource_type == 'table':
                 all_datasource_names = db.all_table_names_in_schema(
-                    schema=schema, force=True,
+                    schema=schema,
+                    force=True,
                     cache=db.table_cache_enabled,
-                    cache_timeout=db.table_cache_timeout)
+                    cache_timeout=db.table_cache_timeout,
+                )
             elif datasource_type == 'view':
                 all_datasource_names = db.all_view_names_in_schema(
-                    schema=schema, force=True,
+                    schema=schema,
+                    force=True,
                     cache=db.table_cache_enabled,
-                    cache_timeout=db.table_cache_timeout)
-            all_result_sets += [
-                '{}.{}'.format(schema, t) for t in all_datasource_names]
+                    cache_timeout=db.table_cache_timeout,
+                )
+            all_result_sets += ['{}.{}'.format(schema, t) for t in all_datasource_names]
         return all_result_sets
 
     @classmethod
@@ -306,8 +311,7 @@ class BaseEngineSpec(object):
         return sorted(inspector.get_view_names(schema))
 
     @classmethod
-    def where_latest_partition(
-            cls, table_name, schema, database, qry, columns=None):
+    def where_latest_partition(cls, table_name, schema, database, qry, columns=None):
         return False
 
     @classmethod
@@ -315,9 +319,18 @@ class BaseEngineSpec(object):
         return [sqla.column(c.get('name')) for c in cols]
 
     @classmethod
-    def select_star(cls, my_db, table_name, engine, schema=None, limit=100,
-                    show_cols=False, indent=True, latest_partition=True,
-                    cols=None):
+    def select_star(
+        cls,
+        my_db,
+        table_name,
+        engine,
+        schema=None,
+        limit=100,
+        show_cols=False,
+        indent=True,
+        latest_partition=True,
+        cols=None,
+    ):
         fields = '*'
         cols = cols or []
         if (show_cols or latest_partition) and not cols:
@@ -337,7 +350,8 @@ class BaseEngineSpec(object):
             qry = qry.limit(limit)
         if latest_partition:
             partition_query = cls.where_latest_partition(
-                table_name, schema, my_db, qry, columns=cols)
+                table_name, schema, my_db, qry, columns=cols
+            )
             if partition_query != False:  # noqa
                 qry = partition_query
         sql = my_db.compile_sqla_query(qry)
@@ -497,9 +511,9 @@ class OracleEngineSpec(PostgresBaseEngineSpec):
 
     @classmethod
     def convert_dttm(cls, target_type, dttm):
-        return (
-            """TO_TIMESTAMP('{}', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')"""
-        ).format(dttm.isoformat())
+        return ("""TO_TIMESTAMP('{}', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')""").format(
+            dttm.isoformat()
+        )
 
 
 class Db2EngineSpec(BaseEngineSpec):
@@ -508,27 +522,25 @@ class Db2EngineSpec(BaseEngineSpec):
     force_column_alias_quotes = True
     time_grain_functions = {
         None: '{col}',
-        'PT1S': 'CAST({col} as TIMESTAMP)'
-                ' - MICROSECOND({col}) MICROSECONDS',
+        'PT1S': 'CAST({col} as TIMESTAMP)' ' - MICROSECOND({col}) MICROSECONDS',
         'PT1M': 'CAST({col} as TIMESTAMP)'
-                ' - SECOND({col}) SECONDS'
-                ' - MICROSECOND({col}) MICROSECONDS',
+        ' - SECOND({col}) SECONDS'
+        ' - MICROSECOND({col}) MICROSECONDS',
         'PT1H': 'CAST({col} as TIMESTAMP)'
-                ' - MINUTE({col}) MINUTES'
-                ' - SECOND({col}) SECONDS'
-                ' - MICROSECOND({col}) MICROSECONDS ',
+        ' - MINUTE({col}) MINUTES'
+        ' - SECOND({col}) SECONDS'
+        ' - MICROSECOND({col}) MICROSECONDS ',
         'P1D': 'CAST({col} as TIMESTAMP)'
-               ' - HOUR({col}) HOURS'
-               ' - MINUTE({col}) MINUTES'
-               ' - SECOND({col}) SECONDS'
-               ' - MICROSECOND({col}) MICROSECONDS',
+        ' - HOUR({col}) HOURS'
+        ' - MINUTE({col}) MINUTES'
+        ' - SECOND({col}) SECONDS'
+        ' - MICROSECOND({col}) MICROSECONDS',
         'P1W': '{col} - (DAYOFWEEK({col})) DAYS',
         'P1M': '{col} - (DAY({col})-1) DAYS',
         'P0.25Y': '{col} - (DAY({col})-1) DAYS'
-                  ' - (MONTH({col})-1) MONTHS'
-                  ' + ((QUARTER({col})-1) * 3) MONTHS',
-        'P1Y': '{col} - (DAY({col})-1) DAYS'
-               ' - (MONTH({col})-1) MONTHS',
+        ' - (MONTH({col})-1) MONTHS'
+        ' + ((QUARTER({col})-1) * 3) MONTHS',
+        'P1Y': '{col} - (DAY({col})-1) DAYS' ' - (MONTH({col})-1) MONTHS',
     }
 
     @classmethod
@@ -560,23 +572,28 @@ class SqliteEngineSpec(BaseEngineSpec):
 
     @classmethod
     def fetch_result_sets(cls, db, datasource_type):
-        schemas = db.all_schema_names(cache=db.schema_cache_enabled,
-                                      cache_timeout=db.schema_cache_timeout,
-                                      force=True)
+        schemas = db.all_schema_names(
+            cache=db.schema_cache_enabled,
+            cache_timeout=db.schema_cache_timeout,
+            force=True,
+        )
         all_result_sets = []
         schema = schemas[0]
         if datasource_type == 'table':
             all_datasource_names = db.all_table_names_in_schema(
-                schema=schema, force=True,
+                schema=schema,
+                force=True,
                 cache=db.table_cache_enabled,
-                cache_timeout=db.table_cache_timeout)
+                cache_timeout=db.table_cache_timeout,
+            )
         elif datasource_type == 'view':
             all_datasource_names = db.all_view_names_in_schema(
-                schema=schema, force=True,
+                schema=schema,
+                force=True,
                 cache=db.table_cache_enabled,
-                cache_timeout=db.table_cache_timeout)
-        all_result_sets += [
-            '{}.{}'.format(schema, t) for t in all_datasource_names]
+                cache_timeout=db.table_cache_timeout,
+            )
+        all_result_sets += ['{}.{}'.format(schema, t) for t in all_datasource_names]
         return all_result_sets
 
     @classmethod
@@ -598,23 +615,19 @@ class MySQLEngineSpec(BaseEngineSpec):
     time_grain_functions = {
         None: '{col}',
         'PT1S': 'DATE_ADD(DATE({col}), '
-              'INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60'
-              ' + SECOND({col})) SECOND)',
+        'INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60'
+        ' + SECOND({col})) SECOND)',
         'PT1M': 'DATE_ADD(DATE({col}), '
-              'INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)',
-        'PT1H': 'DATE_ADD(DATE({col}), '
-              'INTERVAL HOUR({col}) HOUR)',
+        'INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)',
+        'PT1H': 'DATE_ADD(DATE({col}), ' 'INTERVAL HOUR({col}) HOUR)',
         'P1D': 'DATE({col})',
-        'P1W': 'DATE(DATE_SUB({col}, '
-              'INTERVAL DAYOFWEEK({col}) - 1 DAY))',
-        'P1M': 'DATE(DATE_SUB({col}, '
-              'INTERVAL DAYOFMONTH({col}) - 1 DAY))',
+        'P1W': 'DATE(DATE_SUB({col}, ' 'INTERVAL DAYOFWEEK({col}) - 1 DAY))',
+        'P1M': 'DATE(DATE_SUB({col}, ' 'INTERVAL DAYOFMONTH({col}) - 1 DAY))',
         'P0.25Y': 'MAKEDATE(YEAR({col}), 1) '
-              '+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER',
-        'P1Y': 'DATE(DATE_SUB({col}, '
-              'INTERVAL DAYOFYEAR({col}) - 1 DAY))',
+        '+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER',
+        'P1Y': 'DATE(DATE_SUB({col}, ' 'INTERVAL DAYOFYEAR({col}) - 1 DAY))',
         '1969-12-29T00:00:00Z/P1W': 'DATE(DATE_SUB({col}, '
-              'INTERVAL DAYOFWEEK(DATE_SUB({col}, INTERVAL 1 DAY)) - 1 DAY))',
+        'INTERVAL DAYOFWEEK(DATE_SUB({col}, INTERVAL 1 DAY)) - 1 DAY))',
     }
 
     type_code_map = {}  # loaded from get_datatype only if needed
@@ -623,7 +636,8 @@ class MySQLEngineSpec(BaseEngineSpec):
     def convert_dttm(cls, target_type, dttm):
         if target_type.upper() in ('DATETIME', 'DATE'):
             return "STR_TO_DATE('{}', '%Y-%m-%d %H:%i:%s')".format(
-                dttm.strftime('%Y-%m-%d %H:%M:%S'))
+                dttm.strftime('%Y-%m-%d %H:%M:%S')
+            )
         return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
 
     @classmethod
@@ -637,11 +651,10 @@ class MySQLEngineSpec(BaseEngineSpec):
         if not cls.type_code_map:
             # only import and store if needed at least once
             import MySQLdb
+
             ft = MySQLdb.constants.FIELD_TYPE
             cls.type_code_map = {
-                getattr(ft, k): k
-                for k in dir(ft)
-                if not k.startswith('_')
+                getattr(ft, k): k for k in dir(ft) if not k.startswith('_')
             }
         datatype = type_code
         if isinstance(type_code, int):
@@ -678,11 +691,9 @@ class PrestoEngineSpec(BaseEngineSpec):
         'P1M': "date_trunc('month', CAST({col} AS TIMESTAMP))",
         'P0.25Y': "date_trunc('quarter', CAST({col} AS TIMESTAMP))",
         'P1Y': "date_trunc('year', CAST({col} AS TIMESTAMP))",
-        'P1W/1970-01-03T00:00:00Z':
-            "date_add('day', 5, date_trunc('week', date_add('day', 1, \
+        'P1W/1970-01-03T00:00:00Z': "date_add('day', 5, date_trunc('week', date_add('day', 1, \
             CAST({col} AS TIMESTAMP))))",
-        '1969-12-28T00:00:00Z/P1W':
-            "date_add('day', -1, date_trunc('week', \
+        '1969-12-28T00:00:00Z/P1W': "date_add('day', -1, date_trunc('week', \
             date_add('day', 1, CAST({col} AS TIMESTAMP))))",
     }
 
@@ -731,13 +742,13 @@ class PrestoEngineSpec(BaseEngineSpec):
         result_set_df = db.get_df(
             """SELECT table_schema, table_name FROM INFORMATION_SCHEMA.{}S
                ORDER BY concat(table_schema, '.', table_name)""".format(
-                datasource_type.upper(),
+                datasource_type.upper()
             ),
-            None)
+            None,
+        )
         result_sets = []
         for unused, row in result_set_df.iterrows():
-            result_sets.append('{}.{}'.format(
-                row['table_schema'], row['table_name']))
+            result_sets.append('{}.{}'.format(row['table_schema'], row['table_name']))
         return result_sets
 
     @classmethod
@@ -751,13 +762,14 @@ class PrestoEngineSpec(BaseEngineSpec):
             full_table_name = '{}.{}'.format(schema_name, table_name)
         pql = cls._partition_query(full_table_name)
         col_name, latest_part = cls.latest_partition(
-            table_name, schema_name, database, show_first=True)
+            table_name, schema_name, database, show_first=True
+        )
         return {
             'partitions': {
                 'cols': cols,
                 'latest': {col_name: latest_part},
                 'partitionQuery': pql,
-            },
+            }
         }
 
     @classmethod
@@ -791,7 +803,8 @@ class PrestoEngineSpec(BaseEngineSpec):
                     progress = 100 * (completed_splits / total_splits)
                     logging.info(
                         'Query progress: {} / {} '
-                        'splits'.format(completed_splits, total_splits))
+                        'splits'.format(completed_splits, total_splits)
+                    )
                     if progress > query.progress:
                         query.progress = progress
                     session.commit()
@@ -802,9 +815,10 @@ class PrestoEngineSpec(BaseEngineSpec):
     @classmethod
     def extract_error_message(cls, e):
         if (
-                hasattr(e, 'orig') and
-                type(e.orig).__name__ == 'DatabaseError' and
-                isinstance(e.orig[0], dict)):
+            hasattr(e, 'orig')
+            and type(e.orig).__name__ == 'DatabaseError'
+            and isinstance(e.orig[0], dict)
+        ):
             error_dict = e.orig[0]
             return '{} at {}: {}'.format(
                 error_dict.get('errorName'),
@@ -812,17 +826,16 @@ class PrestoEngineSpec(BaseEngineSpec):
                 error_dict.get('message'),
             )
         if (
-                type(e).__name__ == 'DatabaseError' and
-                hasattr(e, 'args') and
-                len(e.args) > 0
+            type(e).__name__ == 'DatabaseError'
+            and hasattr(e, 'args')
+            and len(e.args) > 0
         ):
             error_dict = e.args[0]
             return error_dict.get('message')
         return utils.error_msg_from_exception(e)
 
     @classmethod
-    def _partition_query(
-            cls, table_name, limit=0, order_by=None, filters=None):
+    def _partition_query(cls, table_name, limit=0, order_by=None, filters=None):
         """Returns a partition query
 
         :param table_name: the name of the table to get partitions from
@@ -851,20 +864,22 @@ class PrestoEngineSpec(BaseEngineSpec):
                 l.append(f"{field} = '{value}'")
             where_clause = 'WHERE ' + ' AND '.join(l)
 
-        sql = textwrap.dedent(f"""\
+        sql = textwrap.dedent(
+            f"""\
             SHOW PARTITIONS FROM {table_name}
             {where_clause}
             {order_by_clause}
             {limit_clause}
-        """)
+        """
+        )
         return sql
 
     @classmethod
-    def where_latest_partition(
-            cls, table_name, schema, database, qry, columns=None):
+    def where_latest_partition(cls, table_name, schema, database, qry, columns=None):
         try:
             col_name, value = cls.latest_partition(
-                table_name, schema, database, show_first=True)
+                table_name, schema, database, show_first=True
+            )
         except Exception:
             # table is not partitioned
             return False
@@ -899,12 +914,14 @@ class PrestoEngineSpec(BaseEngineSpec):
         indexes = database.get_indexes(table_name, schema)
         if len(indexes[0]['column_names']) < 1:
             raise SupersetTemplateException(
-                'The table should have one partitioned field')
+                'The table should have one partitioned field'
+            )
         elif not show_first and len(indexes[0]['column_names']) > 1:
             raise SupersetTemplateException(
                 'The table should have a single partitioned field '
                 'to use this function. You may want to use '
-                '`presto.latest_sub_partition`')
+                '`presto.latest_sub_partition`'
+            )
         part_field = indexes[0]['column_names'][0]
         sql = cls._partition_query(table_name, 1, [(part_field, True)])
         df = database.get_df(sql, schema)
@@ -945,8 +962,7 @@ class PrestoEngineSpec(BaseEngineSpec):
                 raise SupersetTemplateException(msg)
         if len(kwargs.keys()) != len(part_fields) - 1:
             msg = (
-                'A filter needs to be specified for {} out of the '
-                '{} fields.'
+                'A filter needs to be specified for {} out of the ' '{} fields.'
             ).format(len(part_fields) - 1, len(part_fields))
             raise SupersetTemplateException(msg)
 
@@ -954,8 +970,7 @@ class PrestoEngineSpec(BaseEngineSpec):
             if field not in kwargs.keys():
                 field_to_return = field
 
-        sql = cls._partition_query(
-            table_name, 1, [(field_to_return, True)], kwargs)
+        sql = cls._partition_query(table_name, 1, [(field_to_return, True)], kwargs)
         df = database.get_df(sql, schema)
         if df.empty:
             return ''
@@ -970,18 +985,18 @@ class HiveEngineSpec(PrestoEngineSpec):
 
     # Scoping regex at class level to avoid recompiling
     # 17/02/07 19:36:38 INFO ql.Driver: Total jobs = 5
-    jobs_stats_r = re.compile(
-        r'.*INFO.*Total jobs = (?P<max_jobs>[0-9]+)')
+    jobs_stats_r = re.compile(r'.*INFO.*Total jobs = (?P<max_jobs>[0-9]+)')
     # 17/02/07 19:37:08 INFO ql.Driver: Launching Job 2 out of 5
     launching_job_r = re.compile(
-        '.*INFO.*Launching Job (?P<job_number>[0-9]+) out of '
-        '(?P<max_jobs>[0-9]+)')
+        '.*INFO.*Launching Job (?P<job_number>[0-9]+) out of ' '(?P<max_jobs>[0-9]+)'
+    )
     # 17/02/07 19:36:58 INFO exec.Task: 2017-02-07 19:36:58,152 Stage-18
     # map = 0%,  reduce = 0%
     stage_progress_r = re.compile(
         r'.*INFO.*Stage-(?P<stage_number>[0-9]+).*'
         r'map = (?P<map_progress>[0-9]+)%.*'
-        r'reduce = (?P<reduce_progress>[0-9]+)%.*')
+        r'reduce = (?P<reduce_progress>[0-9]+)%.*'
+    )
 
     @classmethod
     def patch(cls):
@@ -990,7 +1005,8 @@ class HiveEngineSpec(PrestoEngineSpec):
         from TCLIService import (
             constants as patched_constants,
             ttypes as patched_ttypes,
-            TCLIService as patched_TCLIService)
+            TCLIService as patched_TCLIService,
+        )
 
         hive.TCLIService = patched_TCLIService
         hive.constants = patched_constants
@@ -999,12 +1015,12 @@ class HiveEngineSpec(PrestoEngineSpec):
 
     @classmethod
     def fetch_result_sets(cls, db, datasource_type):
-        return BaseEngineSpec.fetch_result_sets(
-            db, datasource_type)
+        return BaseEngineSpec.fetch_result_sets(db, datasource_type)
 
     @classmethod
     def fetch_data(cls, cursor, limit):
         from TCLIService import ttypes
+
         state = cursor.poll()
         if state.operationState == ttypes.TOperationState.ERROR_STATE:
             raise Exception('Query error', state.errorMessage)
@@ -1013,6 +1029,7 @@ class HiveEngineSpec(PrestoEngineSpec):
     @staticmethod
     def create_table_from_csv(form, table):
         """Uploads a csv file and creates a superset datasource in Hive."""
+
         def convert_to_hive_type(col_type):
             """maps tableschema's types to hive types"""
             tableschema_to_hive_types = {
@@ -1028,7 +1045,8 @@ class HiveEngineSpec(PrestoEngineSpec):
         if not bucket_path:
             logging.info('No upload bucket specified')
             raise Exception(
-                'No upload bucket specified. You can specify one in the config file.')
+                'No upload bucket specified. You can specify one in the config file.'
+            )
 
         table_name = form.name.data
         schema_name = form.schema.data
@@ -1038,38 +1056,43 @@ class HiveEngineSpec(PrestoEngineSpec):
                 raise Exception(
                     "You can't specify a namespace. "
                     'All tables will be uploaded to the `{}` namespace'.format(
-                        config.get('HIVE_NAMESPACE')))
+                        config.get('HIVE_NAMESPACE')
+                    )
+                )
             full_table_name = '{}.{}'.format(
-                config.get('UPLOADED_CSV_HIVE_NAMESPACE'), table_name)
+                config.get('UPLOADED_CSV_HIVE_NAMESPACE'), table_name
+            )
         else:
             if '.' in table_name and schema_name:
                 raise Exception(
                     "You can't specify a namespace both in the name of the table "
-                    'and in the schema field. Please remove one')
+                    'and in the schema field. Please remove one'
+                )
 
-            full_table_name = '{}.{}'.format(
-                schema_name, table_name) if schema_name else table_name
+            full_table_name = (
+                '{}.{}'.format(schema_name, table_name) if schema_name else table_name
+            )
 
         filename = form.csv_file.data.filename
 
         upload_prefix = config['CSV_TO_HIVE_UPLOAD_DIRECTORY']
-        upload_path = config['UPLOAD_FOLDER'] + \
-            secure_filename(filename)
+        upload_path = config['UPLOAD_FOLDER'] + secure_filename(filename)
 
         hive_table_schema = Table(upload_path).infer()
         column_name_and_type = []
         for column_info in hive_table_schema['fields']:
             column_name_and_type.append(
                 '`{}` {}'.format(
-                    column_info['name'],
-                    convert_to_hive_type(column_info['type'])))
+                    column_info['name'], convert_to_hive_type(column_info['type'])
+                )
+            )
         schema_definition = ', '.join(column_name_and_type)
 
         s3 = boto3.client('s3')
         location = os.path.join('s3a://', bucket_path, upload_prefix, table_name)
         s3.upload_file(
-            upload_path, bucket_path,
-            os.path.join(upload_prefix, table_name, filename))
+            upload_path, bucket_path, os.path.join(upload_prefix, table_name, filename)
+        )
         sql = f"""CREATE TABLE {full_table_name} ( {schema_definition} )
             ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS
             TEXTFILE LOCATION '{location}'
@@ -1084,8 +1107,7 @@ class HiveEngineSpec(PrestoEngineSpec):
         if tt == 'DATE':
             return "CAST('{}' AS DATE)".format(dttm.isoformat()[:10])
         elif tt == 'TIMESTAMP':
-            return "CAST('{}' AS TIMESTAMP)".format(
-                dttm.strftime('%Y-%m-%d %H:%M:%S'))
+            return "CAST('{}' AS TIMESTAMP)".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
         return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
 
     @classmethod
@@ -1125,14 +1147,12 @@ class HiveEngineSpec(PrestoEngineSpec):
         logging.info(
             'Progress detail: {}, '
             'current job {}, '
-            'total jobs: {}'.format(stages, current_job, total_jobs))
+            'total jobs: {}'.format(stages, current_job, total_jobs)
+        )
 
-        stage_progress = sum(
-            stages.values()) / len(stages.values()) if stages else 0
+        stage_progress = sum(stages.values()) / len(stages.values()) if stages else 0
 
-        progress = (
-            100 * (current_job - 1) / total_jobs + stage_progress / total_jobs
-        )
+        progress = 100 * (current_job - 1) / total_jobs + stage_progress / total_jobs
         return int(progress)
 
     @classmethod
@@ -1146,6 +1166,7 @@ class HiveEngineSpec(PrestoEngineSpec):
     def handle_cursor(cls, cursor, query, session):
         """Updates progress information"""
         from pyhive import hive
+
         unfinished_states = (
             hive.ttypes.TOperationState.INITIALIZED_STATE,
             hive.ttypes.TOperationState.RUNNING_STATE,
@@ -1173,11 +1194,9 @@ class HiveEngineSpec(PrestoEngineSpec):
                     tracking_url = cls.get_tracking_url(log_lines)
                     if tracking_url:
                         job_id = tracking_url.split('/')[-2]
-                        logging.info(
-                            'Found the tracking url: {}'.format(tracking_url))
+                        logging.info('Found the tracking url: {}'.format(tracking_url))
                         tracking_url = tracking_url_trans(tracking_url)
-                        logging.info(
-                            'Transformation applied: {}'.format(tracking_url))
+                        logging.info('Transformation applied: {}'.format(tracking_url))
                         query.tracking_url = tracking_url
                         logging.info('Job id: {}'.format(job_id))
                         needs_commit = True
@@ -1194,11 +1213,11 @@ class HiveEngineSpec(PrestoEngineSpec):
             polled = cursor.poll()
 
     @classmethod
-    def where_latest_partition(
-            cls, table_name, schema, database, qry, columns=None):
+    def where_latest_partition(cls, table_name, schema, database, qry, columns=None):
         try:
             col_name, value = cls.latest_partition(
-                table_name, schema, database, show_first=True)
+                table_name, schema, database, show_first=True
+            )
         except Exception:
             # table is not partitioned
             return False
@@ -1218,8 +1237,7 @@ class HiveEngineSpec(PrestoEngineSpec):
         return df.ix[:, 0].max().split('=')[1]
 
     @classmethod
-    def _partition_query(
-            cls, table_name, limit=0, order_by=None, filters=None):
+    def _partition_query(cls, table_name, limit=0, order_by=None, filters=None):
         return f'SHOW PARTITIONS {table_name}'
 
     @classmethod
@@ -1249,8 +1267,12 @@ class HiveEngineSpec(PrestoEngineSpec):
         backend_name = url.get_backend_name()
 
         # Must be Hive connection, enable impersonation, and set param auth=LDAP|KERBEROS
-        if (backend_name == 'hive' and 'auth' in url.query.keys() and
-                impersonate_user is True and username is not None):
+        if (
+            backend_name == 'hive'
+            and 'auth' in url.query.keys()
+            and impersonate_user is True
+            and username is not None
+        ):
             configuration['hive.server2.proxy.user'] = username
         return configuration
 
@@ -1312,8 +1334,7 @@ class AthenaEngineSpec(BaseEngineSpec):
             return "from_iso8601_date('{}')".format(dttm.isoformat()[:10])
         if tt == 'TIMESTAMP':
             return "from_iso8601_timestamp('{}')".format(dttm.isoformat())
-        return ("CAST ('{}' AS TIMESTAMP)"
-                .format(dttm.strftime('%Y-%m-%d %H:%M:%S')))
+        return "CAST ('{}' AS TIMESTAMP)".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
 
     @classmethod
     def epoch_to_dttm(cls):
@@ -1349,8 +1370,7 @@ class ClickHouseEngineSpec(BaseEngineSpec):
         if tt == 'DATE':
             return "toDate('{}')".format(dttm.strftime('%Y-%m-%d'))
         if tt == 'DATETIME':
-            return "toDateTime('{}')".format(
-                dttm.strftime('%Y-%m-%d %H:%M:%S'))
+            return "toDateTime('{}')".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
         return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
 
 
@@ -1358,6 +1378,7 @@ class BQEngineSpec(BaseEngineSpec):
     """Engine spec for Google's BigQuery
 
     As contributed by @mxmzdlv on issue #945"""
+
     engine = 'bigquery'
 
     """
@@ -1402,12 +1423,16 @@ class BQEngineSpec(BaseEngineSpec):
     def mutate_expression_label(label):
         mutated_label = re.sub('[^\w]+', '_', label)
         if not re.match('^[a-zA-Z_]+.*', mutated_label):
-            raise SupersetTemplateException('BigQuery field_name used is invalid {}, '
-                                            'should start with a letter or '
-                                            'underscore'.format(mutated_label))
+            raise SupersetTemplateException(
+                'BigQuery field_name used is invalid {}, '
+                'should start with a letter or '
+                'underscore'.format(mutated_label)
+            )
         if len(mutated_label) > 128:
-            raise SupersetTemplateException('BigQuery field_name {}, should be atmost '
-                                            '128 characters'.format(mutated_label))
+            raise SupersetTemplateException(
+                'BigQuery field_name {}, should be at most '
+                '128 characters'.format(mutated_label)
+            )
         return mutated_label
 
     @classmethod
@@ -1416,20 +1441,18 @@ class BQEngineSpec(BaseEngineSpec):
         if not indexes:
             return {}
         partitions_columns = [
-            index.get('column_names', []) for index in indexes
+            index.get('column_names', [])
+            for index in indexes
             if index.get('name') == 'partition'
         ]
         cluster_columns = [
-            index.get('column_names', []) for index in indexes
+            index.get('column_names', [])
+            for index in indexes
             if index.get('name') == 'clustering'
         ]
         return {
-            'partitions': {
-                'cols': partitions_columns,
-            },
-            'clustering': {
-                'cols': cluster_columns,
-            },
+            'partitions': {'cols': partitions_columns},
+            'clustering': {'cols': cluster_columns},
         }
 
     @classmethod
@@ -1442,8 +1465,10 @@ class BQEngineSpec(BaseEngineSpec):
         Also explicitly specifying column names so we don't encounter duplicate
         column names in the result.
         """
-        return [sqla.literal_column(c.get('name')).label(c.get('name').replace('.', '__'))
-                for c in cols]
+        return [
+            sqla.literal_column(c.get('name')).label(c.get('name').replace('.', '__'))
+            for c in cols
+        ]
 
 
 class ImpalaEngineSpec(BaseEngineSpec):
@@ -1475,13 +1500,17 @@ class ImpalaEngineSpec(BaseEngineSpec):
 
     @classmethod
     def get_schema_names(cls, inspector):
-        schemas = [row[0] for row in inspector.engine.execute('SHOW SCHEMAS')
-                   if not row[0].startswith('_')]
+        schemas = [
+            row[0]
+            for row in inspector.engine.execute('SHOW SCHEMAS')
+            if not row[0].startswith('_')
+        ]
         return schemas
 
 
 class DruidEngineSpec(BaseEngineSpec):
     """Engine spec for Druid.io"""
+
     engine = 'druid'
     inner_joins = False
     allows_subquery = False
@@ -1524,13 +1553,13 @@ class KylinEngineSpec(BaseEngineSpec):
         if tt == 'DATE':
             return "CAST('{}' AS DATE)".format(dttm.isoformat()[:10])
         if tt == 'TIMESTAMP':
-            return "CAST('{}' AS TIMESTAMP)".format(
-                dttm.strftime('%Y-%m-%d %H:%M:%S'))
+            return "CAST('{}' AS TIMESTAMP)".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
         return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
 
 
 class TeradataEngineSpec(BaseEngineSpec):
     """Dialect for Teradata DB."""
+
     engine = 'teradata'
     limit_method = LimitMethod.WRAP_SQL
 
@@ -1547,5 +1576,7 @@ class TeradataEngineSpec(BaseEngineSpec):
 
 
 engines = {
-    o.engine: o for o in globals().values()
-    if inspect.isclass(o) and issubclass(o, BaseEngineSpec)}
+    o.engine: o
+    for o in globals().values()
+    if inspect.isclass(o) and issubclass(o, BaseEngineSpec)
+}
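
One side effect of the re-wrapping is visible in the Db2 and MySQL time_grain_functions hunks: adjacent string literals that used to be split across lines now sit next to each other on a single line, for example 'DATE_ADD(DATE({col}), ' 'INTERVAL HOUR({col}) HOUR)'. Python concatenates adjacent literals at compile time, so the dictionary values are unchanged; a quick check:

    hourly = 'DATE_ADD(DATE({col}), ' 'INTERVAL HOUR({col}) HOUR)'
    assert hourly == 'DATE_ADD(DATE({col}), INTERVAL HOUR({col}) HOUR)'
    print(hourly.format(col='dttm'))  # DATE_ADD(DATE(dttm), INTERVAL HOUR(dttm) HOUR)
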
diff --git a/superset/db_engines/hive.py b/superset/db_engines/hive.py
index 31c658e..4b981cd 100644
--- a/superset/db_engines/hive.py
+++ b/superset/db_engines/hive.py
@@ -5,8 +5,7 @@ from thrift import Thrift
 
 
 # TODO: contribute back to pyhive.
-def fetch_logs(self, max_rows=1024,
-               orientation=ttypes.TFetchOrientation.FETCH_NEXT):
+def fetch_logs(self, max_rows=1024, orientation=ttypes.TFetchOrientation.FETCH_NEXT):
     """Mocked. Retrieve the logs produced by the execution of the query.
     Can be called multiple times to fetch the logs produced after
     the previous call.
@@ -20,8 +19,7 @@ def fetch_logs(self, max_rows=1024,
         logs = self._connection.client.GetLog(req).log
         return logs
     # raised if Hive is used
-    except (ttypes.TApplicationException,
-            Thrift.TApplicationException):
+    except (ttypes.TApplicationException, Thrift.TApplicationException):
         if self._state == self._STATE_NONE:
             raise hive.ProgrammingError('No query yet')
         logs = []
@@ -34,8 +32,7 @@ def fetch_logs(self, max_rows=1024,
             )
             response = self._connection.client.FetchResults(req)
             hive._check_status(response)
-            assert not response.results.rows, \
-                'expected data in columnar format'
+            assert not response.results.rows, 'expected data in columnar format'
             assert len(response.results.columns) == 1, response.results.columns
             new_logs = hive._unwrap_column(response.results.columns[0])
             logs += new_logs
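
fetch_logs() above also loses its backslash continuation (the wrapped assert becomes a single line), and the same happens to upload_path in the Hive CSV upload code earlier. Black avoids backslash continuations, preferring a single line or parentheses, and the value is the same either way. A minimal sketch with made-up paths:

    folder, filename = '/tmp/', 'upload.csv'

    # Backslash continuation (old style):
    upload_path = folder + \
        filename

    # Same statement without the continuation, as the formatter prefers:
    upload_path = folder + filename
    assert upload_path == '/tmp/upload.csv'
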
diff --git a/superset/forms.py b/superset/forms.py
index e4b8481..01dc09d 100644
--- a/superset/forms.py
+++ b/superset/forms.py
@@ -4,8 +4,7 @@ from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
 from flask_appbuilder.forms import DynamicForm
 from flask_babel import lazy_gettext as _
 from flask_wtf.file import FileAllowed, FileField, FileRequired
-from wtforms import (
-    BooleanField, Field, IntegerField, SelectField, StringField)
+from wtforms import BooleanField, Field, IntegerField, SelectField, StringField
 from wtforms.ext.sqlalchemy.fields import QuerySelectField
 from wtforms.validators import DataRequired, NumberRange, Optional
 
@@ -45,9 +44,9 @@ class CsvToDatabaseForm(DynamicForm):
     # pylint: disable=E0211
     def csv_allowed_dbs():
         csv_allowed_dbs = []
-        csv_enabled_dbs = db.session.query(
-            models.Database).filter_by(
-            allow_csv_upload=True).all()
+        csv_enabled_dbs = (
+            db.session.query(models.Database).filter_by(allow_csv_upload=True).all()
+        )
         for csv_enabled_db in csv_enabled_dbs:
             if CsvToDatabaseForm.at_least_one_schema_is_allowed(csv_enabled_db):
                 csv_allowed_dbs.append(csv_enabled_db)
@@ -79,13 +78,15 @@ class CsvToDatabaseForm(DynamicForm):
                 b) if database supports schema
                     user is able to upload to schema in schemas_allowed_for_csv_upload
         """
-        if (security_manager.database_access(database) or
-                security_manager.all_datasource_access()):
+        if (
+            security_manager.database_access(database)
+            or security_manager.all_datasource_access()
+        ):
             return True
         schemas = database.get_schema_access_for_csv_upload()
-        if (schemas and
-            security_manager.schemas_accessible_by_user(
-                database, schemas, False)):
+        if schemas and security_manager.schemas_accessible_by_user(
+            database, schemas, False
+        ):
             return True
         return False
 
@@ -93,102 +94,122 @@ class CsvToDatabaseForm(DynamicForm):
         _('Table Name'),
         description=_('Name of table to be created from csv data.'),
         validators=[DataRequired()],
-        widget=BS3TextFieldWidget())
+        widget=BS3TextFieldWidget(),
+    )
     csv_file = FileField(
         _('CSV File'),
         description=_('Select a CSV file to be uploaded to a database.'),
-        validators=[
-            FileRequired(), FileAllowed(['csv'], _('CSV Files Only!'))])
+        validators=[FileRequired(), FileAllowed(['csv'], _('CSV Files Only!'))],
+    )
     con = QuerySelectField(
         _('Database'),
         query_factory=csv_allowed_dbs,
-        get_pk=lambda a: a.id, get_label=lambda a: a.database_name)
+        get_pk=lambda a: a.id,
+        get_label=lambda a: a.database_name,
+    )
     schema = StringField(
         _('Schema'),
         description=_('Specify a schema (if database flavor supports this).'),
         validators=[Optional()],
         widget=BS3TextFieldWidget(),
-        filters=[lambda x: x or None])
+        filters=[lambda x: x or None],
+    )
     sep = StringField(
         _('Delimiter'),
         description=_('Delimiter used by CSV file (for whitespace use \s+).'),
         validators=[DataRequired()],
-        widget=BS3TextFieldWidget())
+        widget=BS3TextFieldWidget(),
+    )
     if_exists = SelectField(
         _('Table Exists'),
         description=_(
             'If table exists do one of the following: '
             'Fail (do nothing), Replace (drop and recreate table) '
-            'or Append (insert data).'),
+            'or Append (insert data).'
+        ),
         choices=[
-            ('fail', _('Fail')), ('replace', _('Replace')),
-            ('append', _('Append'))],
-        validators=[DataRequired()])
+            ('fail', _('Fail')),
+            ('replace', _('Replace')),
+            ('append', _('Append')),
+        ],
+        validators=[DataRequired()],
+    )
     header = IntegerField(
         _('Header Row'),
         description=_(
             'Row containing the headers to use as '
             'column names (0 is first line of data). '
-            'Leave empty if there is no header row.'),
+            'Leave empty if there is no header row.'
+        ),
         validators=[Optional()],
         widget=BS3TextFieldWidget(),
-        filters=[lambda x: x or None])
+        filters=[lambda x: x or None],
+    )
     index_col = IntegerField(
         _('Index Column'),
         description=_(
             'Column to use as the row labels of the '
-            'dataframe. Leave empty if no index column.'),
-        validators=[Optional(), NumberRange(0, 1E+20)],
+            'dataframe. Leave empty if no index column.'
+        ),
+        validators=[Optional(), NumberRange(0, 1e20)],
         widget=BS3TextFieldWidget(),
-        filters=[lambda x: x or None])
+        filters=[lambda x: x or None],
+    )
     mangle_dupe_cols = BooleanField(
         _('Mangle Duplicate Columns'),
-        description=_('Specify duplicate columns as "X.0, X.1".'))
+        description=_('Specify duplicate columns as "X.0, X.1".'),
+    )
     skipinitialspace = BooleanField(
-        _('Skip Initial Space'),
-        description=_('Skip spaces after delimiter.'))
+        _('Skip Initial Space'), description=_('Skip spaces after delimiter.')
+    )
     skiprows = IntegerField(
         _('Skip Rows'),
         description=_('Number of rows to skip at start of file.'),
-        validators=[Optional(), NumberRange(0, 1E+20)],
+        validators=[Optional(), NumberRange(0, 1e20)],
         widget=BS3TextFieldWidget(),
-        filters=[lambda x: x or None])
+        filters=[lambda x: x or None],
+    )
     nrows = IntegerField(
         _('Rows to Read'),
         description=_('Number of rows of file to read.'),
-        validators=[Optional(), NumberRange(0, 1E+20)],
+        validators=[Optional(), NumberRange(0, 1e20)],
         widget=BS3TextFieldWidget(),
-        filters=[lambda x: x or None])
+        filters=[lambda x: x or None],
+    )
     skip_blank_lines = BooleanField(
         _('Skip Blank Lines'),
         description=_(
-            'Skip blank lines rather than interpreting them '
-            'as NaN values.'))
+            'Skip blank lines rather than interpreting them ' 'as NaN values.'
+        ),
+    )
     parse_dates = CommaSeparatedListField(
         _('Parse Dates'),
         description=_(
-            'A comma separated list of columns that should be '
-            'parsed as dates.'),
-        filters=[filter_not_empty_values])
+            'A comma separated list of columns that should be ' 'parsed as dates.'
+        ),
+        filters=[filter_not_empty_values],
+    )
     infer_datetime_format = BooleanField(
         _('Infer Datetime Format'),
-        description=_(
-            'Use Pandas to interpret the datetime format '
-            'automatically.'))
+        description=_('Use Pandas to interpret the datetime format ' 'automatically.'),
+    )
     decimal = StringField(
         _('Decimal Character'),
         description=_('Character to interpret as decimal point.'),
         validators=[Optional()],
         widget=BS3TextFieldWidget(),
-        filters=[lambda x: x or '.'])
+        filters=[lambda x: x or '.'],
+    )
     index = BooleanField(
-        _('Dataframe Index'),
-        description=_('Write dataframe index as a column.'))
+        _('Dataframe Index'), description=_('Write dataframe index as a column.')
+    )
     index_label = StringField(
         _('Column Label(s)'),
         description=_(
             'Column label for index column(s). If None is given '
-            'and Dataframe Index is True, Index Names are used.'),
+            'and Dataframe Index is True, Index Names are used.'
+        ),
         validators=[Optional()],
         widget=BS3TextFieldWidget(),
-        filters=[lambda x: x or None])
+        filters=[lambda x: x or None],
+    )
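
Almost everything in forms.py is whitespace, but the NumberRange bounds also change spelling from 1E+20 to 1e20. The two literals are the same float, so the validators accept exactly the same range; a one-line check:

    assert 1E+20 == 1e20 == float(10 ** 20)
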
diff --git a/superset/jinja_context.py b/superset/jinja_context.py
index 0af6968..d0685c36 100644
--- a/superset/jinja_context.py
+++ b/superset/jinja_context.py
@@ -112,6 +112,7 @@ class BaseTemplateProcessor(object):
     and are given access to the ``models.Database`` object and schema
     name. For globally available methods use ``@classmethod``.
     """
+
     engine = None
 
     def __init__(self, database=None, query=None, table=None, **kwargs):
@@ -153,6 +154,7 @@ class PrestoTemplateProcessor(BaseTemplateProcessor):
     The methods described here are namespaced under ``presto`` in the
     jinja context as in ``SELECT '{{ presto.some_macro_call() }}'``
     """
+
     engine = 'presto'
 
     @staticmethod
@@ -164,15 +166,14 @@ class PrestoTemplateProcessor(BaseTemplateProcessor):
     def latest_partition(self, table_name):
         table_name, schema = self._schema_table(table_name, self.schema)
         return self.database.db_engine_spec.latest_partition(
-            table_name, schema, self.database)[1]
+            table_name, schema, self.database
+        )[1]
 
     def latest_sub_partition(self, table_name, **kwargs):
         table_name, schema = self._schema_table(table_name, self.schema)
         return self.database.db_engine_spec.latest_sub_partition(
-            table_name=table_name,
-            schema=schema,
-            database=self.database,
-            **kwargs)
+            table_name=table_name, schema=schema, database=self.database, **kwargs
+        )
 
 
 class HiveTemplateProcessor(PrestoTemplateProcessor):
diff --git a/superset/legacy.py b/superset/legacy.py
index 35fad36..5b302ba 100644
--- a/superset/legacy.py
+++ b/superset/legacy.py
@@ -18,8 +18,12 @@ def cast_filter_data(form_data):
             col_str = '{}_col_{}'.format(prefix, i)
             op_str = '{}_op_{}'.format(prefix, i)
             val_str = '{}_eq_{}'.format(prefix, i)
-            if col_str in fd and op_str in fd and val_str in fd \
-               and len(fd[val_str]) > 0:
+            if (
+                col_str in fd
+                and op_str in fd
+                and val_str in fd
+                and len(fd[val_str]) > 0
+            ):
                 f = {}
                 f['col'] = fd[col_str]
                 f['op'] = fd[op_str]
@@ -82,6 +86,5 @@ def update_time_range(form_data):
     """Move since and until to time_range."""
     if 'since' in form_data or 'until' in form_data:
         form_data['time_range'] = '{} : {}'.format(
-            form_data.pop('since', '') or '',
-            form_data.pop('until', '') or '',
+            form_data.pop('since', '') or '', form_data.pop('until', '') or ''
         )
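
For reference, the small helper re-wrapped at the end of legacy.py folds the legacy 'since'/'until' keys into a single 'time_range' string. A standalone sketch of that behaviour, re-typed here rather than imported from Superset, followed by a usage example:

    def update_time_range(form_data):
        """Move since and until to time_range."""
        if 'since' in form_data or 'until' in form_data:
            form_data['time_range'] = '{} : {}'.format(
                form_data.pop('since', '') or '', form_data.pop('until', '') or ''
            )

    fd = {'since': '1960-01-01', 'until': 'now', 'viz_type': 'area'}
    update_time_range(fd)
    assert fd == {'viz_type': 'area', 'time_range': '1960-01-01 : now'}
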
diff --git a/superset/migrations/env.py b/superset/migrations/env.py
index 94269f5..3a30363 100755
--- a/superset/migrations/env.py
+++ b/superset/migrations/env.py
@@ -20,9 +20,10 @@ logger = logging.getLogger('alembic.env')
 # from myapp import mymodel
 from flask import current_app
 
-config.set_main_option('sqlalchemy.url',
-                       current_app.config.get('SQLALCHEMY_DATABASE_URI'))
-target_metadata = Base.metadata   # pylint: disable=no-member
+config.set_main_option(
+    'sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI')
+)
+target_metadata = Base.metadata  # pylint: disable=no-member
 
 # other values from the config, defined by the needs of env.py,
 # can be acquired:
@@ -67,26 +68,27 @@ def run_migrations_online():
                 directives[:] = []
                 logger.info('No changes in schema detected.')
 
-    engine = engine_from_config(config.get_section(config.config_ini_section),
-                                prefix='sqlalchemy.',
-                                poolclass=pool.NullPool)
+    engine = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix='sqlalchemy.',
+        poolclass=pool.NullPool,
+    )
 
     connection = engine.connect()
     kwargs = {}
     if engine.name in ('sqlite', 'mysql'):
-        kwargs = {
-            'transaction_per_migration': True,
-            'transactional_ddl': True,
-        }
+        kwargs = {'transaction_per_migration': True, 'transactional_ddl': True}
     configure_args = current_app.extensions['migrate'].configure_args
     if configure_args:
         kwargs.update(configure_args)
 
-    context.configure(connection=connection,
-                      target_metadata=target_metadata,
-                      # compare_type=True,
-                      process_revision_directives=process_revision_directives,
-                      **kwargs)
+    context.configure(
+        connection=connection,
+        target_metadata=target_metadata,
+        # compare_type=True,
+        process_revision_directives=process_revision_directives,
+        **kwargs
+    )
 
     try:
         with context.begin_transaction():
@@ -94,6 +96,7 @@ def run_migrations_online():
     finally:
         connection.close()
 
+
 if context.is_offline_mode():
     run_migrations_offline()
 else:
diff --git a/superset/migrations/versions/0c5070e96b57_add_user_attributes_table.py b/superset/migrations/versions/0c5070e96b57_add_user_attributes_table.py
index 69eba1b..9a2ea84 100644
--- a/superset/migrations/versions/0c5070e96b57_add_user_attributes_table.py
+++ b/superset/migrations/versions/0c5070e96b57_add_user_attributes_table.py
@@ -15,19 +15,20 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.create_table('user_attribute',
-    sa.Column('created_on', sa.DateTime(), nullable=True),
-    sa.Column('changed_on', sa.DateTime(), nullable=True),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('user_id', sa.Integer(), nullable=True),
-    sa.Column('welcome_dashboard_id', sa.Integer(), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), nullable=True),
-    sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
-    sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
-    sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
-    sa.ForeignKeyConstraint(['welcome_dashboard_id'], ['dashboards.id'], ),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'user_attribute',
+        sa.Column('created_on', sa.DateTime(), nullable=True),
+        sa.Column('changed_on', sa.DateTime(), nullable=True),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('user_id', sa.Integer(), nullable=True),
+        sa.Column('welcome_dashboard_id', sa.Integer(), nullable=True),
+        sa.Column('created_by_fk', sa.Integer(), nullable=True),
+        sa.Column('changed_by_fk', sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id']),
+        sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id']),
+        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id']),
+        sa.ForeignKeyConstraint(['welcome_dashboard_id'], ['dashboards.id']),
+        sa.PrimaryKeyConstraint('id'),
     )
 
 
diff --git a/superset/migrations/versions/1226819ee0e3_fix_wrong_constraint_on_table_columns.py b/superset/migrations/versions/1226819ee0e3_fix_wrong_constraint_on_table_columns.py
index 2b360ef..464817c 100644
--- a/superset/migrations/versions/1226819ee0e3_fix_wrong_constraint_on_table_columns.py
+++ b/superset/migrations/versions/1226819ee0e3_fix_wrong_constraint_on_table_columns.py
@@ -16,37 +16,44 @@ down_revision = '956a063c52b3'
 
 
 naming_convention = {
-    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s"
 }
 
 
 def find_constraint_name(upgrade=True):
     cols = {'column_name'} if upgrade else {'datasource_name'}
     return generic_find_constraint_name(
-        table='columns', columns=cols, referenced='datasources', db=db)
+        table='columns', columns=cols, referenced='datasources', db=db
+    )
 
 
 def upgrade():
     try:
         constraint = find_constraint_name()
-        with op.batch_alter_table("columns",
-                naming_convention=naming_convention) as batch_op:
+        with op.batch_alter_table(
+            "columns", naming_convention=naming_convention
+        ) as batch_op:
             if constraint:
                 batch_op.drop_constraint(constraint, type_="foreignkey")
             batch_op.create_foreign_key(
                 'fk_columns_datasource_name_datasources',
                 'datasources',
-                ['datasource_name'], ['datasource_name'])
+                ['datasource_name'],
+                ['datasource_name'],
+            )
     except:
-        logging.warning(
-            "Could not find or drop constraint on `columns`")
+        logging.warning("Could not find or drop constraint on `columns`")
+
 
 def downgrade():
     constraint = find_constraint_name(False) or 'fk_columns_datasource_name_datasources'
-    with op.batch_alter_table("columns",
-        naming_convention=naming_convention) as batch_op:
+    with op.batch_alter_table(
+        "columns", naming_convention=naming_convention
+    ) as batch_op:
         batch_op.drop_constraint(constraint, type_="foreignkey")
         batch_op.create_foreign_key(
             'fk_columns_column_name_datasources',
             'datasources',
-            ['column_name'], ['datasource_name'])
+            ['column_name'],
+            ['datasource_name'],
+        )
diff --git a/superset/migrations/versions/1296d28ec131_druid_exports.py b/superset/migrations/versions/1296d28ec131_druid_exports.py
index 6df37bc..4c310e3 100644
--- a/superset/migrations/versions/1296d28ec131_druid_exports.py
+++ b/superset/migrations/versions/1296d28ec131_druid_exports.py
@@ -15,7 +15,9 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.add_column('datasources', sa.Column('params', sa.String(length=1000), nullable=True))
+    op.add_column(
+        'datasources', sa.Column('params', sa.String(length=1000), nullable=True)
+    )
 
 
 def downgrade():
diff --git a/superset/migrations/versions/12d55656cbca_is_featured.py b/superset/migrations/versions/12d55656cbca_is_featured.py
index 3158223..0eb3351 100644
--- a/superset/migrations/versions/12d55656cbca_is_featured.py
+++ b/superset/migrations/versions/12d55656cbca_is_featured.py
@@ -20,4 +20,3 @@ def upgrade():
 
 def downgrade():
     op.drop_column('tables', 'is_featured')
-
diff --git a/superset/migrations/versions/130915240929_is_sqllab_viz_flow.py b/superset/migrations/versions/130915240929_is_sqllab_viz_flow.py
index 4d8554c..e58abc6 100644
--- a/superset/migrations/versions/130915240929_is_sqllab_viz_flow.py
+++ b/superset/migrations/versions/130915240929_is_sqllab_viz_flow.py
@@ -20,6 +20,7 @@ Base = declarative_base()
 
 class Table(Base):
     """Declarative class to do query in upgrade"""
+
     __tablename__ = 'tables'
     id = sa.Column(sa.Integer, primary_key=True)
     sql = sa.Column(sa.Text)
diff --git a/superset/migrations/versions/18e88e1cc004_making_audit_nullable.py b/superset/migrations/versions/18e88e1cc004_making_audit_nullable.py
index 213f603..40b350c 100644
--- a/superset/migrations/versions/18e88e1cc004_making_audit_nullable.py
+++ b/superset/migrations/versions/18e88e1cc004_making_audit_nullable.py
@@ -16,12 +16,11 @@ down_revision = '430039611635'
 def upgrade():
     try:
         op.alter_column(
-            'clusters', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'clusters', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'clusters', 'created_on',
-            existing_type=sa.DATETIME(), nullable=True)
+            'clusters', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.drop_constraint(None, 'columns', type_='foreignkey')
         op.drop_constraint(None, 'columns', type_='foreignkey')
         op.drop_column('columns', 'created_on')
@@ -29,86 +28,58 @@ def upgrade():
         op.drop_column('columns', 'changed_on')
         op.drop_column('columns', 'changed_by_fk')
         op.alter_column(
-            'css_templates', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'css_templates', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'css_templates', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'css_templates', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'dashboards', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'dashboards', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'dashboards', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'dashboards', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.create_unique_constraint(None, 'dashboards', ['slug'])
         op.alter_column(
-            'datasources', 'changed_by_fk',
-            existing_type=sa.INTEGER(),
-            nullable=True)
+            'datasources', 'changed_by_fk', existing_type=sa.INTEGER(), nullable=True
+        )
         op.alter_column(
-            'datasources', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'datasources', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'datasources', 'created_by_fk',
-            existing_type=sa.INTEGER(),
-            nullable=True)
+            'datasources', 'created_by_fk', existing_type=sa.INTEGER(), nullable=True
+        )
         op.alter_column(
-            'datasources', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'datasources', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column('dbs', 'changed_on', existing_type=sa.DATETIME(), nullable=True)
+        op.alter_column('dbs', 'created_on', existing_type=sa.DATETIME(), nullable=True)
         op.alter_column(
-            'dbs', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'slices', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'dbs', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'slices', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'slices', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'sql_metrics', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'slices', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'sql_metrics', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'sql_metrics', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'table_columns', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'sql_metrics', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'table_columns', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'table_columns', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'tables', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
         op.alter_column(
-            'table_columns', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
-        op.alter_column(
-            'tables', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
-        op.alter_column(
-            'tables', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
-        op.alter_column(
-            'url', 'changed_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
-        op.alter_column(
-            'url', 'created_on',
-            existing_type=sa.DATETIME(),
-            nullable=True)
+            'tables', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column('url', 'changed_on', existing_type=sa.DATETIME(), nullable=True)
+        op.alter_column('url', 'created_on', existing_type=sa.DATETIME(), nullable=True)
     except Exception:
         pass
 
diff --git a/superset/migrations/versions/1a48a5411020_adding_slug_to_dash.py b/superset/migrations/versions/1a48a5411020_adding_slug_to_dash.py
index c6b8864..0cc14b7 100644
--- a/superset/migrations/versions/1a48a5411020_adding_slug_to_dash.py
+++ b/superset/migrations/versions/1a48a5411020_adding_slug_to_dash.py
@@ -13,6 +13,7 @@ down_revision = '289ce07647b'
 from alembic import op
 import sqlalchemy as sa
 
+
 def upgrade():
     op.add_column('dashboards', sa.Column('slug', sa.String(length=255), nullable=True))
     try:
diff --git a/superset/migrations/versions/1d9e835a84f9_.py b/superset/migrations/versions/1d9e835a84f9_.py
index 0d5235c..2c331f4 100644
--- a/superset/migrations/versions/1d9e835a84f9_.py
+++ b/superset/migrations/versions/1d9e835a84f9_.py
@@ -21,7 +21,9 @@ def upgrade():
             'allow_csv_upload',
             sa.Boolean(),
             nullable=False,
-            server_default=expression.true()))
+            server_default=expression.true(),
+        ),
+    )
 
 
 def downgrade():
diff --git a/superset/migrations/versions/1e2841a4128_.py b/superset/migrations/versions/1e2841a4128_.py
index 330b3b2..3d0e308 100644
--- a/superset/migrations/versions/1e2841a4128_.py
+++ b/superset/migrations/versions/1e2841a4128_.py
@@ -13,6 +13,7 @@ down_revision = '5a7bad26f2a7'
 from alembic import op
 import sqlalchemy as sa
 
+
 def upgrade():
     op.add_column('table_columns', sa.Column('expression', sa.Text(), nullable=True))
 
diff --git a/superset/migrations/versions/21e88bc06c02_annotation_migration.py b/superset/migrations/versions/21e88bc06c02_annotation_migration.py
index 4c7bb80..e9ced44 100644
--- a/superset/migrations/versions/21e88bc06c02_annotation_migration.py
+++ b/superset/migrations/versions/21e88bc06c02_annotation_migration.py
@@ -1,8 +1,7 @@
 import json
 
 from alembic import op
-from sqlalchemy import (
-  Column, Integer, or_, String, Text)
+from sqlalchemy import Column, Integer, or_, String, Text
 from sqlalchemy.ext.declarative import declarative_base
 
 from superset import db
@@ -33,23 +32,26 @@ def upgrade():
     bind = op.get_bind()
     session = db.Session(bind=bind)
 
-    for slc in session.query(Slice).filter(or_(
-            Slice.viz_type.like('line'), Slice.viz_type.like('bar'))):
+    for slc in session.query(Slice).filter(
+        or_(Slice.viz_type.like('line'), Slice.viz_type.like('bar'))
+    ):
         params = json.loads(slc.params)
         layers = params.get('annotation_layers', [])
         if layers:
             new_layers = []
             for layer in layers:
-                new_layers.append({
-                    'annotationType': 'INTERVAL',
-                    'style': 'solid',
-                    'name': 'Layer {}'.format(layer),
-                    'show': True,
-                    'overrides': {'since': None, 'until': None},
-                    'value': layer,
-                    'width': 1,
-                    'sourceType': 'NATIVE',
-                })
+                new_layers.append(
+                    {
+                        'annotationType': 'INTERVAL',
+                        'style': 'solid',
+                        'name': 'Layer {}'.format(layer),
+                        'show': True,
+                        'overrides': {'since': None, 'until': None},
+                        'value': layer,
+                        'width': 1,
+                        'sourceType': 'NATIVE',
+                    }
+                )
             params['annotation_layers'] = new_layers
             slc.params = json.dumps(params)
             session.merge(slc)
@@ -61,8 +63,9 @@ def downgrade():
     bind = op.get_bind()
     session = db.Session(bind=bind)
 
-    for slc in session.query(Slice).filter(or_(
-            Slice.viz_type.like('line'), Slice.viz_type.like('bar'))):
+    for slc in session.query(Slice).filter(
+        or_(Slice.viz_type.like('line'), Slice.viz_type.like('bar'))
+    ):
         params = json.loads(slc.params)
         layers = params.get('annotation_layers', [])
         if layers:
diff --git a/superset/migrations/versions/2591d77e9831_user_id.py b/superset/migrations/versions/2591d77e9831_user_id.py
index 4fac61c..1de7eea 100644
--- a/superset/migrations/versions/2591d77e9831_user_id.py
+++ b/superset/migrations/versions/2591d77e9831_user_id.py
@@ -15,12 +15,12 @@ import sqlalchemy as sa
 
 
 def upgrade():
-  with op.batch_alter_table('tables') as batch_op:
-    batch_op.add_column(sa.Column('user_id', sa.Integer()))
-    batch_op.create_foreign_key('user_id', 'ab_user', ['user_id'], ['id'])
+    with op.batch_alter_table('tables') as batch_op:
+        batch_op.add_column(sa.Column('user_id', sa.Integer()))
+        batch_op.create_foreign_key('user_id', 'ab_user', ['user_id'], ['id'])
 
 
 def downgrade():
-  with op.batch_alter_table('tables') as batch_op:
-    batch_op.drop_constraint('user_id', type_='foreignkey')
-    batch_op.drop_column('user_id')
+    with op.batch_alter_table('tables') as batch_op:
+        batch_op.drop_constraint('user_id', type_='foreignkey')
+        batch_op.drop_column('user_id')
diff --git a/superset/migrations/versions/27ae655e4247_make_creator_owners.py b/superset/migrations/versions/27ae655e4247_make_creator_owners.py
index 2c3cdc1..50bf82d 100644
--- a/superset/migrations/versions/27ae655e4247_make_creator_owners.py
+++ b/superset/migrations/versions/27ae655e4247_make_creator_owners.py
@@ -16,41 +16,51 @@ from sqlalchemy.ext.declarative import declarative_base
 from flask_appbuilder.models.mixins import AuditMixin
 from sqlalchemy.orm import relationship
 from flask_appbuilder import Model
-from sqlalchemy import (
-    Column, Integer, ForeignKey, Table)
+from sqlalchemy import Column, Integer, ForeignKey, Table
 
 Base = declarative_base()
 
+
 class User(Base):
     """Declarative class to do query in upgrade"""
+
     __tablename__ = 'ab_user'
     id = Column(Integer, primary_key=True)
 
-slice_user = Table('slice_user', Base.metadata,
+
+slice_user = Table(
+    'slice_user',
+    Base.metadata,
     Column('id', Integer, primary_key=True),
     Column('user_id', Integer, ForeignKey('ab_user.id')),
-    Column('slice_id', Integer, ForeignKey('slices.id'))
+    Column('slice_id', Integer, ForeignKey('slices.id')),
 )
 
 dashboard_user = Table(
-    'dashboard_user', Base.metadata,
+    'dashboard_user',
+    Base.metadata,
     Column('id', Integer, primary_key=True),
     Column('user_id', Integer, ForeignKey('ab_user.id')),
-    Column('dashboard_id', Integer, ForeignKey('dashboards.id'))
+    Column('dashboard_id', Integer, ForeignKey('dashboards.id')),
 )
 
+
 class Slice(Base, AuditMixin):
     """Declarative class to do query in upgrade"""
+
     __tablename__ = 'slices'
     id = Column(Integer, primary_key=True)
     owners = relationship("User", secondary=slice_user)
 
+
 class Dashboard(Base, AuditMixin):
     """Declarative class to do query in upgrade"""
+
     __tablename__ = 'dashboards'
     id = Column(Integer, primary_key=True)
     owners = relationship("User", secondary=dashboard_user)
 
+
 def upgrade():
     bind = op.get_bind()
     session = db.Session(bind=bind)
diff --git a/superset/migrations/versions/289ce07647b_add_encrypted_password_field.py b/superset/migrations/versions/289ce07647b_add_encrypted_password_field.py
index 2328d6d..a749db2 100644
--- a/superset/migrations/versions/289ce07647b_add_encrypted_password_field.py
+++ b/superset/migrations/versions/289ce07647b_add_encrypted_password_field.py
@@ -8,7 +8,7 @@ Create Date: 2015-11-21 11:18:00.650587
 
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy_utils  import EncryptedType
+from sqlalchemy_utils import EncryptedType
 
 # revision identifiers, used by Alembic.
 revision = '289ce07647b'
@@ -17,11 +17,8 @@ down_revision = '2929af7925ed'
 
 def upgrade():
     op.add_column(
-        'dbs',
-        sa.Column(
-            'password',
-            EncryptedType(sa.String(1024)),
-            nullable=True))
+        'dbs', sa.Column('password', EncryptedType(sa.String(1024)), nullable=True)
+    )
 
 
 def downgrade():
diff --git a/superset/migrations/versions/2929af7925ed_tz_offsets_in_data_sources.py b/superset/migrations/versions/2929af7925ed_tz_offsets_in_data_sources.py
index 85b54bc..64107c6 100644
--- a/superset/migrations/versions/2929af7925ed_tz_offsets_in_data_sources.py
+++ b/superset/migrations/versions/2929af7925ed_tz_offsets_in_data_sources.py
@@ -13,6 +13,7 @@ down_revision = '1e2841a4128'
 from alembic import op
 import sqlalchemy as sa
 
+
 def upgrade():
     op.add_column('datasources', sa.Column('offset', sa.Integer(), nullable=True))
     op.add_column('tables', sa.Column('offset', sa.Integer(), nullable=True))
diff --git a/superset/migrations/versions/2fcdcb35e487_saved_queries.py b/superset/migrations/versions/2fcdcb35e487_saved_queries.py
index da1c975..a27b49e 100644
--- a/superset/migrations/versions/2fcdcb35e487_saved_queries.py
+++ b/superset/migrations/versions/2fcdcb35e487_saved_queries.py
@@ -27,11 +27,11 @@ def upgrade():
         sa.Column('description', sa.Text(), nullable=True),
         sa.Column('changed_by_fk', sa.Integer(), nullable=True),
         sa.Column('created_by_fk', sa.Integer(), nullable=True),
-        sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
-        sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
-        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
-        sa.ForeignKeyConstraint(['db_id'], ['dbs.id'], ),
-        sa.PrimaryKeyConstraint('id')
+        sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id']),
+        sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id']),
+        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id']),
+        sa.ForeignKeyConstraint(['db_id'], ['dbs.id']),
+        sa.PrimaryKeyConstraint('id'),
     )
 
 
diff --git a/superset/migrations/versions/30bb17c0dc76_.py b/superset/migrations/versions/30bb17c0dc76_.py
index c68255b..ec655b8 100644
--- a/superset/migrations/versions/30bb17c0dc76_.py
+++ b/superset/migrations/versions/30bb17c0dc76_.py
@@ -23,4 +23,4 @@ def upgrade():
 
 def downgrade():
     with op.batch_alter_table('logs') as batch_op:
-        batch_op.add_column(sa.Column('dt', sa.Date,  default=date.today()))
+        batch_op.add_column(sa.Column('dt', sa.Date, default=date.today()))
diff --git a/superset/migrations/versions/315b3f4da9b0_adding_log_model.py b/superset/migrations/versions/315b3f4da9b0_adding_log_model.py
index d9fdfac..c9e9c66 100644
--- a/superset/migrations/versions/315b3f4da9b0_adding_log_model.py
+++ b/superset/migrations/versions/315b3f4da9b0_adding_log_model.py
@@ -15,14 +15,15 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.create_table('logs',
+    op.create_table(
+        'logs',
         sa.Column('id', sa.Integer(), nullable=False),
         sa.Column('action', sa.String(length=512), nullable=True),
         sa.Column('user_id', sa.Integer(), nullable=True),
         sa.Column('json', sa.Text(), nullable=True),
         sa.Column('dttm', sa.DateTime(), nullable=True),
-        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
-        sa.PrimaryKeyConstraint('id')
+        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id']),
+        sa.PrimaryKeyConstraint('id'),
     )
 
 
diff --git a/superset/migrations/versions/33d996bcc382_update_slice_model.py b/superset/migrations/versions/33d996bcc382_update_slice_model.py
index a92a959..947f583 100644
--- a/superset/migrations/versions/33d996bcc382_update_slice_model.py
+++ b/superset/migrations/versions/33d996bcc382_update_slice_model.py
@@ -2,8 +2,7 @@ from alembic import op
 import sqlalchemy as sa
 from superset import db
 from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import (
-    Column, Integer, String)
+from sqlalchemy import Column, Integer, String
 
 """update slice model
 
@@ -22,6 +21,7 @@ Base = declarative_base()
 
 class Slice(Base):
     """Declarative class to do query in upgrade"""
+
     __tablename__ = 'slices'
     id = Column(Integer, primary_key=True)
     datasource_id = Column(Integer)
diff --git a/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py b/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py
index 0a0f802..eb6162d 100644
--- a/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py
+++ b/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py
@@ -24,11 +24,14 @@ def upgrade():
     # cleanup after: https://github.com/airbnb/superset/pull/1078
     try:
         slices_ibfk_1 = generic_find_constraint_name(
-            table='slices', columns={'druid_datasource_id'},
-            referenced='datasources', db=db)
+            table='slices',
+            columns={'druid_datasource_id'},
+            referenced='datasources',
+            db=db,
+        )
         slices_ibfk_2 = generic_find_constraint_name(
-            table='slices', columns={'table_id'},
-            referenced='tables', db=db)
+            table='slices', columns={'table_id'}, referenced='tables', db=db
+        )
 
         with op.batch_alter_table('slices') as batch_op:
             if slices_ibfk_1:
@@ -44,7 +47,8 @@ def upgrade():
     try:
         with op.batch_alter_table('columns') as batch_op:
             batch_op.create_foreign_key(
-                None, 'datasources', ['datasource_name'], ['datasource_name'])
+                None, 'datasources', ['datasource_name'], ['datasource_name']
+            )
     except Exception as e:
         logging.warning(str(e))
     try:
@@ -69,31 +73,42 @@ def downgrade():
 
     try:
         with op.batch_alter_table('slices') as batch_op:
-            batch_op.add_column(sa.Column(
-                'table_id', mysql.INTEGER(display_width=11),
-                autoincrement=False, nullable=True))
-            batch_op.add_column(sa.Column(
-                'druid_datasource_id', sa.Integer(), autoincrement=False,
-                nullable=True))
+            batch_op.add_column(
+                sa.Column(
+                    'table_id',
+                    mysql.INTEGER(display_width=11),
+                    autoincrement=False,
+                    nullable=True,
+                )
+            )
+            batch_op.add_column(
+                sa.Column(
+                    'druid_datasource_id',
+                    sa.Integer(),
+                    autoincrement=False,
+                    nullable=True,
+                )
+            )
             batch_op.create_foreign_key(
-                'slices_ibfk_1', 'datasources', ['druid_datasource_id'],
-                ['id'])
-            batch_op.create_foreign_key(
-                'slices_ibfk_2', 'tables', ['table_id'], ['id'])
+                'slices_ibfk_1', 'datasources', ['druid_datasource_id'], ['id']
+            )
+            batch_op.create_foreign_key('slices_ibfk_2', 'tables', ['table_id'], ['id'])
     except Exception as e:
         logging.warning(str(e))
 
     try:
         fk_columns = generic_find_constraint_name(
-            table='columns', columns={'datasource_name'},
-            referenced='datasources', db=db)
+            table='columns',
+            columns={'datasource_name'},
+            referenced='datasources',
+            db=db,
+        )
         with op.batch_alter_table('columns') as batch_op:
             batch_op.drop_constraint(fk_columns, type_='foreignkey')
     except Exception as e:
         logging.warning(str(e))
 
-    op.add_column(
-        'query', sa.Column('name', sa.String(length=256), nullable=True))
+    op.add_column('query', sa.Column('name', sa.String(length=256), nullable=True))
     try:
         with op.batch_alter_table('query') as batch_op:
             batch_op.drop_constraint('client_id', type_='unique')
diff --git a/superset/migrations/versions/3dda56f1c4c6_migrate_num_period_compare_and_period_.py b/superset/migrations/versions/3dda56f1c4c6_migrate_num_period_compare_and_period_.py
index d52849c..e50cb79 100644
--- a/superset/migrations/versions/3dda56f1c4c6_migrate_num_period_compare_and_period_.py
+++ b/superset/migrations/versions/3dda56f1c4c6_migrate_num_period_compare_and_period_.py
@@ -34,11 +34,7 @@ class Slice(Base):
     params = Column(Text)
 
 
-comparison_type_map = {
-    'factor': 'ratio',
-    'growth': 'percentage',
-    'value': 'absolute',
-}
+comparison_type_map = {'factor': 'ratio', 'growth': 'percentage', 'value': 'absolute'}
 
 db_engine_specs_map = {
     'second': 'PT1S',
@@ -86,12 +82,7 @@ def timedelta_to_string(obj):
 
 
 def format_seconds(value):
-    periods = [
-        ('minute', 60),
-        ('hour', 3600),
-        ('day', 86400),
-        ('week', 604800),
-    ]
+    periods = [('minute', 60), ('hour', 3600), ('day', 86400), ('week', 604800)]
     for period, multiple in periods:
         if value % multiple == 0:
             value //= multiple
@@ -136,8 +127,11 @@ def upgrade():
             continue
 
         num_period_compare = int(params.get('num_period_compare'))
-        granularity = (params.get('granularity') if chart.datasource_type == 'druid'
-            else params.get('time_grain_sqla'))
+        granularity = (
+            params.get('granularity')
+            if chart.datasource_type == 'druid'
+            else params.get('time_grain_sqla')
+        )
         time_compare = compute_time_compare(granularity, num_period_compare)
 
         period_ratio_type = params.get('period_ratio_type') or 'growth'
diff --git a/superset/migrations/versions/41f6a59a61f2_database_options_for_sql_lab.py b/superset/migrations/versions/41f6a59a61f2_database_options_for_sql_lab.py
index 18a5441..848a8ad 100644
--- a/superset/migrations/versions/41f6a59a61f2_database_options_for_sql_lab.py
+++ b/superset/migrations/versions/41f6a59a61f2_database_options_for_sql_lab.py
@@ -15,11 +15,10 @@ down_revision = '3c3ffe173e4f'
 
 def upgrade():
     op.add_column('dbs', sa.Column('allow_ctas', sa.Boolean(), nullable=True))
+    op.add_column('dbs', sa.Column('expose_in_sqllab', sa.Boolean(), nullable=True))
     op.add_column(
-        'dbs', sa.Column('expose_in_sqllab', sa.Boolean(), nullable=True))
-    op.add_column(
-        'dbs',
-        sa.Column('force_ctas_schema', sa.String(length=250), nullable=True))
+        'dbs', sa.Column('force_ctas_schema', sa.String(length=250), nullable=True)
+    )
 
 
 def downgrade():
diff --git a/superset/migrations/versions/4451805bbaa1_remove_double_percents.py b/superset/migrations/versions/4451805bbaa1_remove_double_percents.py
index 2e57b39..2e5328e 100644
--- a/superset/migrations/versions/4451805bbaa1_remove_double_percents.py
+++ b/superset/migrations/versions/4451805bbaa1_remove_double_percents.py
@@ -66,8 +66,8 @@ def replace(source, target):
                 if 'adhoc_filters' in params:
                     for filt in params['adhoc_filters']:
                         if 'sqlExpression' in filt:
-                            filt['sqlExpression'] = (
-                                filt['sqlExpression'].replace(source, target)
+                            filt['sqlExpression'] = filt['sqlExpression'].replace(
+                                source, target
                             )
 
                     slc.params = json.dumps(params, sort_keys=True)
diff --git a/superset/migrations/versions/4500485bde7d_allow_run_sync_async.py b/superset/migrations/versions/4500485bde7d_allow_run_sync_async.py
index 0695e2c..b0dc581 100644
--- a/superset/migrations/versions/4500485bde7d_allow_run_sync_async.py
+++ b/superset/migrations/versions/4500485bde7d_allow_run_sync_async.py
@@ -25,4 +25,3 @@ def downgrade():
         op.drop_column('dbs', 'allow_run_async')
     except Exception:
         pass
-
diff --git a/superset/migrations/versions/46f444d8b9b7_remove_coordinator_from_druid_cluster_.py b/superset/migrations/versions/46f444d8b9b7_remove_coordinator_from_druid_cluster_.py
index d86084d..99bb3a6 100644
--- a/superset/migrations/versions/46f444d8b9b7_remove_coordinator_from_druid_cluster_.py
+++ b/superset/migrations/versions/46f444d8b9b7_remove_coordinator_from_druid_cluster_.py
@@ -22,10 +22,11 @@ def upgrade():
 
 def downgrade():
     op.add_column(
-        'clusters',
-        sa.Column('coordinator_host', sa.String(length=256), nullable=True),
+        'clusters', sa.Column('coordinator_host', sa.String(length=256), nullable=True)
+    )
+    op.add_column(
+        'clusters', sa.Column('coordinator_port', sa.Integer(), nullable=True)
     )
-    op.add_column('clusters', sa.Column('coordinator_port', sa.Integer(), nullable=True))
     op.add_column(
         'clusters',
         sa.Column('coordinator_endpoint', sa.String(length=256), nullable=True),
diff --git a/superset/migrations/versions/4736ec66ce19_.py b/superset/migrations/versions/4736ec66ce19_.py
index e314102..a496e4e 100644
--- a/superset/migrations/versions/4736ec66ce19_.py
+++ b/superset/migrations/versions/4736ec66ce19_.py
@@ -42,8 +42,7 @@ def upgrade():
     # Add the new less restrictive uniqueness constraint.
     with op.batch_alter_table('datasources', naming_convention=conv) as batch_op:
         batch_op.create_unique_constraint(
-            'uq_datasources_cluster_name',
-            ['cluster_name', 'datasource_name'],
+            'uq_datasources_cluster_name', ['cluster_name', 'datasource_name']
         )
 
     # Augment the tables which have a foreign key constraint related to the
@@ -73,11 +72,9 @@ def upgrade():
         # Migrate the existing data.
         for datasource in bind.execute(datasources.select()):
             bind.execute(
-                table.update().where(
-                    table.c.datasource_name == datasource.datasource_name,
-                ).values(
-                    datasource_id=datasource.id,
-                ),
+                table.update()
+                .where(table.c.datasource_name == datasource.datasource_name)
+                .values(datasource_id=datasource.id)
             )
 
         with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
@@ -86,10 +83,7 @@ def upgrade():
             # due to prior revisions (1226819ee0e3, 3b626e2a6783) there may
             # incorrectly be multiple duplicate constraints.
             names = generic_find_fk_constraint_names(
-                foreign,
-                {'datasource_name'},
-                'datasources',
-                insp,
+                foreign, {'datasource_name'}, 'datasources', insp
             )
 
             for name in names:
@@ -105,10 +99,9 @@ def upgrade():
         with op.batch_alter_table('datasources', naming_convention=conv) as batch_op:
             batch_op.drop_constraint(
                 generic_find_uq_constraint_name(
-                    'datasources',
-                    {'datasource_name'},
-                    insp,
-                ) or 'uq_datasources_datasource_name',
+                    'datasources', {'datasource_name'}, insp
+                )
+                or 'uq_datasources_datasource_name',
                 type_='unique',
             )
     except Exception as e:
@@ -116,7 +109,8 @@ def upgrade():
             'Constraint drop failed, you may want to do this '
             'manually on your database. For context, this is a known '
             'issue around nondeterministic constraint names on Postgres '
-            'and perhaps more databases through SQLAlchemy.')
+            'and perhaps more databases through SQLAlchemy.'
+        )
         logging.exception(e)
 
 
@@ -129,8 +123,7 @@ def downgrade():
     # datasources.datasource_name column is no longer unique.
     with op.batch_alter_table('datasources', naming_convention=conv) as batch_op:
         batch_op.create_unique_constraint(
-            'uq_datasources_datasource_name',
-            ['datasource_name'],
+            'uq_datasources_datasource_name', ['datasource_name']
         )
 
     # Augment the tables which have a foreign key constraint related to the
@@ -160,19 +153,16 @@ def downgrade():
         # Migrate the existing data.
         for datasource in bind.execute(datasources.select()):
             bind.execute(
-                table.update().where(
-                    table.c.datasource_id == datasource.id,
-                ).values(
-                    datasource_name=datasource.datasource_name,
-                ),
+                table.update()
+                .where(table.c.datasource_id == datasource.id)
+                .values(datasource_name=datasource.datasource_name)
             )
 
         with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:
 
             # Drop the datasource_id column and associated constraint.
             batch_op.drop_constraint(
-                'fk_{}_datasource_id_datasources'.format(foreign),
-                type_='foreignkey',
+                'fk_{}_datasource_id_datasources'.format(foreign), type_='foreignkey'
             )
 
             batch_op.drop_column('datasource_id')
@@ -183,21 +173,18 @@ def downgrade():
         # associated with the cluster_name column needs to be dropped.
         batch_op.drop_constraint(
             generic_find_fk_constraint_name(
-                'datasources',
-                {'cluster_name'},
-                'clusters',
-                insp,
-            ) or 'fk_datasources_cluster_name_clusters',
+                'datasources', {'cluster_name'}, 'clusters', insp
+            )
+            or 'fk_datasources_cluster_name_clusters',
             type_='foreignkey',
         )
 
         # Drop the old less restrictive uniqueness constraint.
         batch_op.drop_constraint(
             generic_find_uq_constraint_name(
-                'datasources',
-                {'cluster_name', 'datasource_name'},
-                insp,
-            ) or 'uq_datasources_cluster_name',
+                'datasources', {'cluster_name', 'datasource_name'}, insp
+            )
+            or 'uq_datasources_cluster_name',
             type_='unique',
         )
 
diff --git a/superset/migrations/versions/4ce8df208545_migrate_time_range_for_default_filters.py b/superset/migrations/versions/4ce8df208545_migrate_time_range_for_default_filters.py
index 4d04817..949312f 100644
--- a/superset/migrations/versions/4ce8df208545_migrate_time_range_for_default_filters.py
+++ b/superset/migrations/versions/4ce8df208545_migrate_time_range_for_default_filters.py
@@ -10,11 +10,7 @@ Create Date: 2018-11-12 13:31:07.578090
 import json
 
 from alembic import op
-from sqlalchemy import (
-    Column,
-    Integer,
-    Text,
-)
+from sqlalchemy import Column, Integer, Text
 from sqlalchemy.ext.declarative import declarative_base
 
 from superset import db
@@ -27,6 +23,7 @@ Base = declarative_base()
 
 class Dashboard(Base):
     """Declarative class to do query in upgrade"""
+
     __tablename__ = 'dashboards'
     id = Column(Integer, primary_key=True)
     json_metadata = Column(Text)
@@ -48,8 +45,11 @@ def upgrade():
             if default_filters and default_filters != '{}':
                 try:
                     filters = json.loads(default_filters)
-                    keys = [key for key, val in filters.items() if
-                            val.get('__from') or val.get('__to')]
+                    keys = [
+                        key
+                        for key, val in filters.items()
+                        if val.get('__from') or val.get('__to')
+                    ]
                     if len(keys):
                         for key in keys:
                             val = filters[key]
@@ -68,8 +68,11 @@ def upgrade():
             # key: chart id, value: field names that escape from filters
             filter_immune_slice_fields = json_metadata.get('filter_immune_slice_fields')
             if filter_immune_slice_fields:
-                keys = [key for key, val in filter_immune_slice_fields.items() if
-                        '__from' in val or '__to' in val]
+                keys = [
+                    key
+                    for key, val in filter_immune_slice_fields.items()
+                    if '__from' in val or '__to' in val
+                ]
                 if len(keys):
                     for key in keys:
                         val = filter_immune_slice_fields[key]
@@ -81,8 +84,9 @@ def upgrade():
                         # just abandon __from and __to
                         if '__time_range' not in val:
                             val.append('__time_range')
-                    json_metadata['filter_immune_slice_fields'] = \
-                        filter_immune_slice_fields
+                    json_metadata[
+                        'filter_immune_slice_fields'
+                    ] = filter_immune_slice_fields
                     has_update = True
 
             if has_update:
diff --git a/superset/migrations/versions/4e6a06bad7a8_init.py b/superset/migrations/versions/4e6a06bad7a8_init.py
index add55f8..71b4c84 100644
--- a/superset/migrations/versions/4e6a06bad7a8_init.py
+++ b/superset/migrations/versions/4e6a06bad7a8_init.py
@@ -16,157 +16,221 @@ import sqlalchemy as sa
 
 def upgrade():
     ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('clusters',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('cluster_name', sa.String(length=250), nullable=True),
-    sa.Column('coordinator_host', sa.String(length=255), nullable=True),
-    sa.Column('coordinator_port', sa.Integer(), nullable=True),
-    sa.Column('coordinator_endpoint', sa.String(length=255), nullable=True),
-    sa.Column('broker_host', sa.String(length=255), nullable=True),
-    sa.Column('broker_port', sa.Integer(), nullable=True),
-    sa.Column('broker_endpoint', sa.String(length=255), nullable=True),
-    sa.Column('metadata_last_refreshed', sa.DateTime(), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('cluster_name')
+    op.create_table(
+        'clusters',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('cluster_name', sa.String(length=250), nullable=True),
+        sa.Column('coordinator_host', sa.String(length=255), nullable=True),
+        sa.Column('coordinator_port', sa.Integer(), nullable=True),
+        sa.Column('coordinator_endpoint', sa.String(length=255), nullable=True),
+        sa.Column('broker_host', sa.String(length=255), nullable=True),
+        sa.Column('broker_port', sa.Integer(), nullable=True),
+        sa.Column('broker_endpoint', sa.String(length=255), nullable=True),
+        sa.Column('metadata_last_refreshed', sa.DateTime(), nullable=True),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('cluster_name'),
     )
-    op.create_table('dashboards',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('dashboard_title', sa.String(length=500), nullable=True),
-    sa.Column('position_json', sa.Text(), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'dashboards',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('dashboard_title', sa.String(length=500), nullable=True),
+        sa.Column('position_json', sa.Text(), nullable=True),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
     )
-    op.create_table('dbs',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('database_name', sa.String(length=250), nullable=True),
-    sa.Column('sqlalchemy_uri', sa.String(length=1024), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('database_name')
+    op.create_table(
+        'dbs',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('database_name', sa.String(length=250), nullable=True),
+        sa.Column('sqlalchemy_uri', sa.String(length=1024), nullable=True),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('database_name'),
     )
-    op.create_table('datasources',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('datasource_name', sa.String(length=255), nullable=True),
-    sa.Column('is_featured', sa.Boolean(), nullable=True),
-    sa.Column('is_hidden', sa.Boolean(), nullable=True),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('default_endpoint', sa.Text(), nullable=True),
-    sa.Column('user_id', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('cluster_name', sa.String(length=250), sa.ForeignKey("clusters.cluster_name"), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('datasource_name')
+    op.create_table(
+        'datasources',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('datasource_name', sa.String(length=255), nullable=True),
+        sa.Column('is_featured', sa.Boolean(), nullable=True),
+        sa.Column('is_hidden', sa.Boolean(), nullable=True),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column('default_endpoint', sa.Text(), nullable=True),
+        sa.Column('user_id', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
+        sa.Column(
+            'cluster_name',
+            sa.String(length=250),
+            sa.ForeignKey("clusters.cluster_name"),
+            nullable=True,
+        ),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('datasource_name'),
     )
-    op.create_table('tables',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('table_name', sa.String(length=250), nullable=True),
-    sa.Column('main_dttm_col', sa.String(length=250), nullable=True),
-    sa.Column('default_endpoint', sa.Text(), nullable=True),
-    sa.Column('database_id', sa.Integer(), sa.ForeignKey("dbs.id"), nullable=False),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('table_name')
+    op.create_table(
+        'tables',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('table_name', sa.String(length=250), nullable=True),
+        sa.Column('main_dttm_col', sa.String(length=250), nullable=True),
+        sa.Column('default_endpoint', sa.Text(), nullable=True),
+        sa.Column('database_id', sa.Integer(), sa.ForeignKey("dbs.id"), nullable=False),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('table_name'),
     )
-    op.create_table('columns',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('datasource_name', sa.String(length=255), nullable=True),
-    sa.Column('column_name', sa.String(length=255), nullable=True),
-    sa.Column('is_active', sa.Boolean(), nullable=True),
-    sa.Column('type', sa.String(length=32), nullable=True),
-    sa.Column('groupby', sa.Boolean(), nullable=True),
-    sa.Column('count_distinct', sa.Boolean(), nullable=True),
-    sa.Column('sum', sa.Boolean(), nullable=True),
-    sa.Column('max', sa.Boolean(), nullable=True),
-    sa.Column('min', sa.Boolean(), nullable=True),
-    sa.Column('filterable', sa.Boolean(), nullable=True),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'columns',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('datasource_name', sa.String(length=255), nullable=True),
+        sa.Column('column_name', sa.String(length=255), nullable=True),
+        sa.Column('is_active', sa.Boolean(), nullable=True),
+        sa.Column('type', sa.String(length=32), nullable=True),
+        sa.Column('groupby', sa.Boolean(), nullable=True),
+        sa.Column('count_distinct', sa.Boolean(), nullable=True),
+        sa.Column('sum', sa.Boolean(), nullable=True),
+        sa.Column('max', sa.Boolean(), nullable=True),
+        sa.Column('min', sa.Boolean(), nullable=True),
+        sa.Column('filterable', sa.Boolean(), nullable=True),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
     )
-    op.create_table('metrics',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('metric_name', sa.String(length=512), nullable=True),
-    sa.Column('verbose_name', sa.String(length=1024), nullable=True),
-    sa.Column('metric_type', sa.String(length=32), nullable=True),
-    sa.Column('datasource_name', sa.String(length=255), sa.ForeignKey("datasources.datasource_name"), nullable=True),
-    sa.Column('json', sa.Text(), nullable=True),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.ForeignKeyConstraint(['datasource_name'], ['datasources.datasource_name'], ),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'metrics',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('metric_name', sa.String(length=512), nullable=True),
+        sa.Column('verbose_name', sa.String(length=1024), nullable=True),
+        sa.Column('metric_type', sa.String(length=32), nullable=True),
+        sa.Column(
+            'datasource_name',
+            sa.String(length=255),
+            sa.ForeignKey("datasources.datasource_name"),
+            nullable=True,
+        ),
+        sa.Column('json', sa.Text(), nullable=True),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.ForeignKeyConstraint(['datasource_name'], ['datasources.datasource_name']),
+        sa.PrimaryKeyConstraint('id'),
     )
-    op.create_table('slices',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('slice_name', sa.String(length=250), nullable=True),
-    sa.Column('druid_datasource_id', sa.Integer(), sa.ForeignKey("datasources.id"), nullable=True),
-    sa.Column('table_id', sa.Integer(), sa.ForeignKey("tables.id"), nullable=True),
-    sa.Column('datasource_type', sa.String(length=200), nullable=True),
-    sa.Column('datasource_name', sa.String(length=2000), nullable=True),
-    sa.Column('viz_type', sa.String(length=250), nullable=True),
-    sa.Column('params', sa.Text(), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'slices',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('slice_name', sa.String(length=250), nullable=True),
+        sa.Column(
+            'druid_datasource_id',
+            sa.Integer(),
+            sa.ForeignKey("datasources.id"),
+            nullable=True,
+        ),
+        sa.Column('table_id', sa.Integer(), sa.ForeignKey("tables.id"), nullable=True),
+        sa.Column('datasource_type', sa.String(length=200), nullable=True),
+        sa.Column('datasource_name', sa.String(length=2000), nullable=True),
+        sa.Column('viz_type', sa.String(length=250), nullable=True),
+        sa.Column('params', sa.Text(), nullable=True),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
     )
-    op.create_table('sql_metrics',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('metric_name', sa.String(length=512), nullable=True),
-    sa.Column('verbose_name', sa.String(length=1024), nullable=True),
-    sa.Column('metric_type', sa.String(length=32), nullable=True),
-    sa.Column('table_id', sa.Integer(), sa.ForeignKey("tables.id"), nullable=True),
-    sa.Column('expression', sa.Text(), nullable=True),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'sql_metrics',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('metric_name', sa.String(length=512), nullable=True),
+        sa.Column('verbose_name', sa.String(length=1024), nullable=True),
+        sa.Column('metric_type', sa.String(length=32), nullable=True),
+        sa.Column('table_id', sa.Integer(), sa.ForeignKey("tables.id"), nullable=True),
+        sa.Column('expression', sa.Text(), nullable=True),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
     )
-    op.create_table('table_columns',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('table_id', sa.Integer(), sa.ForeignKey("tables.id"), nullable=True),
-    sa.Column('column_name', sa.String(length=255), nullable=True),
-    sa.Column('is_dttm', sa.Boolean(), nullable=True),
-    sa.Column('is_active', sa.Boolean(), nullable=True),
-    sa.Column('type', sa.String(length=32), nullable=True),
-    sa.Column('groupby', sa.Boolean(), nullable=True),
-    sa.Column('count_distinct', sa.Boolean(), nullable=True),
-    sa.Column('sum', sa.Boolean(), nullable=True),
-    sa.Column('max', sa.Boolean(), nullable=True),
-    sa.Column('min', sa.Boolean(), nullable=True),
-    sa.Column('filterable', sa.Boolean(), nullable=True),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'table_columns',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('table_id', sa.Integer(), sa.ForeignKey("tables.id"), nullable=True),
+        sa.Column('column_name', sa.String(length=255), nullable=True),
+        sa.Column('is_dttm', sa.Boolean(), nullable=True),
+        sa.Column('is_active', sa.Boolean(), nullable=True),
+        sa.Column('type', sa.String(length=32), nullable=True),
+        sa.Column('groupby', sa.Boolean(), nullable=True),
+        sa.Column('count_distinct', sa.Boolean(), nullable=True),
+        sa.Column('sum', sa.Boolean(), nullable=True),
+        sa.Column('max', sa.Boolean(), nullable=True),
+        sa.Column('min', sa.Boolean(), nullable=True),
+        sa.Column('filterable', sa.Boolean(), nullable=True),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column(
+            'created_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.Column(
+            'changed_by_fk', sa.Integer(), sa.ForeignKey("ab_user.id"), nullable=True
+        ),
+        sa.PrimaryKeyConstraint('id'),
     )
-    op.create_table('dashboard_slices',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('dashboard_id', sa.Integer(), sa.ForeignKey("dashboards.id"), nullable=True),
-    sa.Column('slice_id', sa.Integer(), sa.ForeignKey("slices.id"), nullable=True),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'dashboard_slices',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column(
+            'dashboard_id', sa.Integer(), sa.ForeignKey("dashboards.id"), nullable=True
+        ),
+        sa.Column('slice_id', sa.Integer(), sa.ForeignKey("slices.id"), nullable=True),
+        sa.PrimaryKeyConstraint('id'),
     )
     ### end Alembic commands ###
 
diff --git a/superset/migrations/versions/4fa88fe24e94_owners_many_to_many.py b/superset/migrations/versions/4fa88fe24e94_owners_many_to_many.py
index f0ec92b..d032c61 100644
--- a/superset/migrations/versions/4fa88fe24e94_owners_many_to_many.py
+++ b/superset/migrations/versions/4fa88fe24e94_owners_many_to_many.py
@@ -14,20 +14,22 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.create_table('dashboard_user',
+    op.create_table(
+        'dashboard_user',
         sa.Column('id', sa.Integer(), nullable=False),
         sa.Column('user_id', sa.Integer(), nullable=True),
         sa.Column('dashboard_id', sa.Integer(), nullable=True),
-        sa.ForeignKeyConstraint(['dashboard_id'], ['dashboards.id'], ),
-        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
+        sa.ForeignKeyConstraint(['dashboard_id'], ['dashboards.id']),
+        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id']),
         sa.PrimaryKeyConstraint('id'),
     )
-    op.create_table('slice_user',
+    op.create_table(
+        'slice_user',
         sa.Column('id', sa.Integer(), nullable=False),
         sa.Column('user_id', sa.Integer(), nullable=True),
         sa.Column('slice_id', sa.Integer(), nullable=True),
-        sa.ForeignKeyConstraint(['slice_id'], ['slices.id'], ),
-        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
+        sa.ForeignKeyConstraint(['slice_id'], ['slices.id']),
+        sa.ForeignKeyConstraint(['user_id'], ['ab_user.id']),
         sa.PrimaryKeyConstraint('id'),
     )
 
diff --git a/superset/migrations/versions/5e4a03ef0bf0_add_request_access_model.py b/superset/migrations/versions/5e4a03ef0bf0_add_request_access_model.py
index ad6375f..ee0eff7 100644
--- a/superset/migrations/versions/5e4a03ef0bf0_add_request_access_model.py
+++ b/superset/migrations/versions/5e4a03ef0bf0_add_request_access_model.py
@@ -23,9 +23,9 @@ def upgrade():
         sa.Column('datasource_id', sa.Integer(), nullable=True),
         sa.Column('changed_by_fk', sa.Integer(), nullable=True),
         sa.Column('created_by_fk', sa.Integer(), nullable=True),
-        sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
-        sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
-        sa.PrimaryKeyConstraint('id')
+        sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id']),
+        sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id']),
+        sa.PrimaryKeyConstraint('id'),
     )
 
 
diff --git a/superset/migrations/versions/732f1c06bcbf_add_fetch_values_predicate.py b/superset/migrations/versions/732f1c06bcbf_add_fetch_values_predicate.py
index 2d7ce54..ef89ee1 100644
--- a/superset/migrations/versions/732f1c06bcbf_add_fetch_values_predicate.py
+++ b/superset/migrations/versions/732f1c06bcbf_add_fetch_values_predicate.py
@@ -13,9 +13,16 @@ down_revision = 'd6db5a5cdb5d'
 from alembic import op
 import sqlalchemy as sa
 
+
 def upgrade():
-    op.add_column('datasources', sa.Column('fetch_values_from', sa.String(length=100), nullable=True))
-    op.add_column('tables', sa.Column('fetch_values_predicate', sa.String(length=1000), nullable=True))
+    op.add_column(
+        'datasources',
+        sa.Column('fetch_values_from', sa.String(length=100), nullable=True),
+    )
+    op.add_column(
+        'tables',
+        sa.Column('fetch_values_predicate', sa.String(length=1000), nullable=True),
+    )
 
 
 def downgrade():
diff --git a/superset/migrations/versions/763d4b211ec9_fixing_audit_fk.py b/superset/migrations/versions/763d4b211ec9_fixing_audit_fk.py
index d8feb77..1e46089 100644
--- a/superset/migrations/versions/763d4b211ec9_fixing_audit_fk.py
+++ b/superset/migrations/versions/763d4b211ec9_fixing_audit_fk.py
@@ -20,72 +20,64 @@ def upgrade():
     op.add_column('metrics', sa.Column('created_by_fk', sa.Integer(), nullable=True))
     op.add_column('metrics', sa.Column('created_on', sa.DateTime(), nullable=True))
     try:
-        op.alter_column('columns', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('columns', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('css_templates', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('css_templates', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('dashboards', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('dashboards', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('datasources', 'changed_by_fk',
-                   existing_type=sa.INTEGER(),
-                   nullable=True)
-        op.alter_column('datasources', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('datasources', 'created_by_fk',
-                   existing_type=sa.INTEGER(),
-                   nullable=True)
-        op.alter_column('datasources', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('dbs', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('dbs', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('slices', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('slices', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('sql_metrics', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('sql_metrics', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('table_columns', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('table_columns', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('tables', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('tables', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('url', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
-        op.alter_column('url', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=True)
+        op.alter_column(
+            'columns', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'columns', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'css_templates', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'css_templates', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'dashboards', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'dashboards', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'datasources', 'changed_by_fk', existing_type=sa.INTEGER(), nullable=True
+        )
+        op.alter_column(
+            'datasources', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'datasources', 'created_by_fk', existing_type=sa.INTEGER(), nullable=True
+        )
+        op.alter_column(
+            'datasources', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column('dbs', 'changed_on', existing_type=sa.DATETIME(), nullable=True)
+        op.alter_column('dbs', 'created_on', existing_type=sa.DATETIME(), nullable=True)
+        op.alter_column(
+            'slices', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'slices', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'sql_metrics', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'sql_metrics', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'table_columns', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'table_columns', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'tables', 'changed_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column(
+            'tables', 'created_on', existing_type=sa.DATETIME(), nullable=True
+        )
+        op.alter_column('url', 'changed_on', existing_type=sa.DATETIME(), nullable=True)
+        op.alter_column('url', 'created_on', existing_type=sa.DATETIME(), nullable=True)
         op.create_foreign_key(None, 'metrics', 'ab_user', ['changed_by_fk'], ['id'])
         op.create_foreign_key(None, 'metrics', 'ab_user', ['created_by_fk'], ['id'])
     except:
@@ -98,73 +90,73 @@ def downgrade():
     op.drop_column('metrics', 'changed_on')
     op.drop_column('metrics', 'changed_by_fk')
     try:
-        op.alter_column('url', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('url', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('tables', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('tables', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('table_columns', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('table_columns', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('sql_metrics', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('sql_metrics', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('slices', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('slices', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
+        op.alter_column(
+            'url', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'url', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'tables', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'tables', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'table_columns', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'table_columns', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'sql_metrics', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'sql_metrics', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'slices', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'slices', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
         op.drop_constraint(None, 'metrics', type_='foreignkey')
         op.drop_constraint(None, 'metrics', type_='foreignkey')
-        op.alter_column('dbs', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('dbs', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('datasources', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('datasources', 'created_by_fk',
-                   existing_type=sa.INTEGER(),
-                   nullable=False)
-        op.alter_column('datasources', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('datasources', 'changed_by_fk',
-                   existing_type=sa.INTEGER(),
-                   nullable=False)
-        op.alter_column('dashboards', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('dashboards', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('css_templates', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('css_templates', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('columns', 'created_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
-        op.alter_column('columns', 'changed_on',
-                   existing_type=sa.DATETIME(),
-                   nullable=False)
+        op.alter_column(
+            'dbs', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'dbs', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'datasources', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'datasources', 'created_by_fk', existing_type=sa.INTEGER(), nullable=False
+        )
+        op.alter_column(
+            'datasources', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'datasources', 'changed_by_fk', existing_type=sa.INTEGER(), nullable=False
+        )
+        op.alter_column(
+            'dashboards', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'dashboards', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'css_templates', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'css_templates', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'columns', 'created_on', existing_type=sa.DATETIME(), nullable=False
+        )
+        op.alter_column(
+            'columns', 'changed_on', existing_type=sa.DATETIME(), nullable=False
+        )
     except:
         pass
diff --git a/superset/migrations/versions/7dbf98566af7_slice_description.py b/superset/migrations/versions/7dbf98566af7_slice_description.py
index 329af9e..7438363 100644
--- a/superset/migrations/versions/7dbf98566af7_slice_description.py
+++ b/superset/migrations/versions/7dbf98566af7_slice_description.py
@@ -13,8 +13,10 @@ down_revision = '8e80a26a31db'
 from alembic import op
 import sqlalchemy as sa
 
+
 def upgrade():
     op.add_column('slices', sa.Column('description', sa.Text(), nullable=True))
 
+
 def downgrade():
     op.drop_column('slices', 'description')
diff --git a/superset/migrations/versions/7e3ddad2a00b_results_key_to_query.py b/superset/migrations/versions/7e3ddad2a00b_results_key_to_query.py
index f2a4608..987c9d8 100644
--- a/superset/migrations/versions/7e3ddad2a00b_results_key_to_query.py
+++ b/superset/migrations/versions/7e3ddad2a00b_results_key_to_query.py
@@ -15,7 +15,9 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.add_column('query', sa.Column('results_key', sa.String(length=64), nullable=True))
+    op.add_column(
+        'query', sa.Column('results_key', sa.String(length=64), nullable=True)
+    )
 
 
 def downgrade():
diff --git a/superset/migrations/versions/7fcdcde0761c_.py b/superset/migrations/versions/7fcdcde0761c_.py
index ad87fca..024006e 100644
--- a/superset/migrations/versions/7fcdcde0761c_.py
+++ b/superset/migrations/versions/7fcdcde0761c_.py
@@ -24,6 +24,7 @@ Base = declarative_base()
 
 class Dashboard(Base):
     """Declarative class to do query in upgrade"""
+
     __tablename__ = 'dashboards'
     id = sa.Column(sa.Integer, primary_key=True)
     dashboard_title = sa.Column(sa.String(500))
@@ -32,8 +33,7 @@ class Dashboard(Base):
 
 def is_v2_dash(positions):
     return (
-        isinstance(positions, dict) and
-        positions.get('DASHBOARD_VERSION_KEY') == 'v2'
+        isinstance(positions, dict) and positions.get('DASHBOARD_VERSION_KEY') == 'v2'
     )
 
 
@@ -48,14 +48,18 @@ def upgrade():
         if is_v2_dash(position_json):
             # re-dump the json data and remove leading and trailing white spaces
             text = json.dumps(
-                position_json, indent=None, separators=(',', ':'), sort_keys=True)
+                position_json, indent=None, separators=(',', ':'), sort_keys=True
+            )
             # remove DASHBOARD_ and _TYPE prefix/suffix in all the component ids
             text = re.sub(r'DASHBOARD_(?!VERSION)', '', text)
             text = text.replace('_TYPE', '')
 
             dashboard.position_json = text
-            print('dash id:{} position_json size from {} to {}'.format(
-                dashboard.id, len(original_text), len(text)))
+            print(
+                'dash id:{} position_json size from {} to {}'.format(
+                    dashboard.id, len(original_text), len(text)
+                )
+            )
             session.merge(dashboard)
             session.commit()
 
diff --git a/superset/migrations/versions/836c0bf75904_cache_timeouts.py b/superset/migrations/versions/836c0bf75904_cache_timeouts.py
index 86480c2..df9918a 100644
--- a/superset/migrations/versions/836c0bf75904_cache_timeouts.py
+++ b/superset/migrations/versions/836c0bf75904_cache_timeouts.py
@@ -14,7 +14,9 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.add_column('datasources', sa.Column('cache_timeout', sa.Integer(), nullable=True))
+    op.add_column(
+        'datasources', sa.Column('cache_timeout', sa.Integer(), nullable=True)
+    )
     op.add_column('dbs', sa.Column('cache_timeout', sa.Integer(), nullable=True))
     op.add_column('slices', sa.Column('cache_timeout', sa.Integer(), nullable=True))
     op.add_column('tables', sa.Column('cache_timeout', sa.Integer(), nullable=True))
diff --git a/superset/migrations/versions/8e80a26a31db_.py b/superset/migrations/versions/8e80a26a31db_.py
index 73a0b06..6ec64b6 100644
--- a/superset/migrations/versions/8e80a26a31db_.py
+++ b/superset/migrations/versions/8e80a26a31db_.py
@@ -14,16 +14,17 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.create_table('url',
-    sa.Column('created_on', sa.DateTime(), nullable=False),
-    sa.Column('changed_on', sa.DateTime(), nullable=False),
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('url', sa.Text(), nullable=True),
-    sa.Column('created_by_fk', sa.Integer(), nullable=True),
-    sa.Column('changed_by_fk', sa.Integer(), nullable=True),
-    sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
-    sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        'url',
+        sa.Column('created_on', sa.DateTime(), nullable=False),
+        sa.Column('changed_on', sa.DateTime(), nullable=False),
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('url', sa.Text(), nullable=True),
+        sa.Column('created_by_fk', sa.Integer(), nullable=True),
+        sa.Column('changed_by_fk', sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id']),
+        sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id']),
+        sa.PrimaryKeyConstraint('id'),
     )
 
 
diff --git a/superset/migrations/versions/956a063c52b3_adjusting_key_length.py b/superset/migrations/versions/956a063c52b3_adjusting_key_length.py
index e5351de..9e9300b 100644
--- a/superset/migrations/versions/956a063c52b3_adjusting_key_length.py
+++ b/superset/migrations/versions/956a063c52b3_adjusting_key_length.py
@@ -15,87 +15,119 @@ down_revision = 'f0fbf6129e13'
 
 def upgrade():
     with op.batch_alter_table('clusters', schema=None) as batch_op:
-        batch_op.alter_column('broker_endpoint',
-                              existing_type=sa.VARCHAR(length=256),
... 8057 lines suppressed ...

