Posted to commits@superset.apache.org by hu...@apache.org on 2018/12/10 05:22:36 UTC

[incubator-superset] 01/01: run black over all .py files

This is an automated email from the ASF dual-hosted git repository.

hugh pushed a commit to branch black-lint
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git

commit 75f08c31d74b83db13f9da5050d2e8e4e8ce8e15
Author: hughhhh <hm...@lyft.com>
AuthorDate: Sun Dec 9 21:22:18 2018 -0800

    run black over all .py files
---
 superset/__init__.py                               |   96 +-
 superset/cli.py                                    |  460 ++--
 superset/common/query_context.py                   |   13 +-
 superset/common/query_object.py                    |   27 +-
 superset/config.py                                 |  123 +-
 superset/connectors/base/models.py                 |  207 +-
 superset/connectors/base/views.py                  |   11 +-
 superset/connectors/connector_registry.py          |   22 +-
 superset/connectors/druid/models.py                | 1148 ++++-----
 superset/connectors/druid/views.py                 |  446 ++--
 superset/connectors/sqla/models.py                 |  620 +++--
 superset/connectors/sqla/views.py                  |  461 ++--
 superset/data/bart_lines.py                        |   29 +-
 superset/data/birth_names.py                       |  412 ++--
 superset/data/countries.py                         |  498 ++--
 superset/data/country_map.py                       |   77 +-
 superset/data/css_templates.py                     |   23 +-
 superset/data/deck.py                              |  589 ++---
 superset/data/energy.py                            |   55 +-
 superset/data/flights.py                           |   37 +-
 superset/data/helpers.py                           |   13 +-
 superset/data/long_lat.py                          |   95 +-
 superset/data/misc_dashboard.py                    |   24 +-
 superset/data/multi_line.py                        |   39 +-
 superset/data/multiformat_time_series.py           |   83 +-
 superset/data/paris.py                             |   23 +-
 superset/data/random_time_series.py                |   62 +-
 superset/data/sf_population_polygons.py            |   23 +-
 superset/data/unicode_test_data.py                 |   84 +-
 superset/data/world_bank.py                        |  290 ++-
 superset/dataframe.py                              |  105 +-
 superset/db_engine_specs.py                        | 1037 ++++----
 superset/db_engines/hive.py                        |   13 +-
 superset/extract_table_names.py                    |    4 +-
 superset/forms.py                                  |  175 +-
 superset/jinja_context.py                          |   59 +-
 superset/legacy.py                                 |   73 +-
 superset/migrations/env.py                         |   43 +-
 .../0c5070e96b57_add_user_attributes_table.py      |   33 +-
 ...9ee0e3_fix_wrong_constraint_on_table_columns.py |   43 +-
 .../versions/1296d28ec131_druid_exports.py         |   10 +-
 .../versions/12d55656cbca_is_featured.py           |    9 +-
 .../versions/130915240929_is_sqllab_viz_flow.py    |   13 +-
 .../versions/18e88e1cc004_making_audit_nullable.py |  161 +-
 .../19a814813610_adding_metric_warning_text.py     |   16 +-
 .../versions/1a1d627ebd8e_position_json.py         |   12 +-
 .../versions/1a48a5411020_adding_slug_to_dash.py   |   13 +-
 .../migrations/versions/1d2ddd543133_log_dt.py     |    8 +-
 superset/migrations/versions/1d9e835a84f9_.py      |   14 +-
 superset/migrations/versions/1e2841a4128_.py       |    9 +-
 .../versions/21e88bc06c02_annotation_migration.py  |   49 +-
 .../migrations/versions/2591d77e9831_user_id.py    |   16 +-
 .../versions/27ae655e4247_make_creator_owners.py   |   40 +-
 .../289ce07647b_add_encrypted_password_field.py    |   15 +-
 .../2929af7925ed_tz_offsets_in_data_sources.py     |   13 +-
 .../versions/2fcdcb35e487_saved_queries.py         |   40 +-
 superset/migrations/versions/30bb17c0dc76_.py      |   12 +-
 .../versions/315b3f4da9b0_adding_log_model.py      |   23 +-
 .../versions/33d996bcc382_update_slice_model.py    |   18 +-
 .../versions/3b626e2a6783_sync_db_with_models.py   |   91 +-
 .../3c3ffe173e4f_add_sql_string_to_table.py        |    8 +-
 ...1c4c6_migrate_num_period_compare_and_period_.py |  100 +-
 .../41f6a59a61f2_database_options_for_sql_lab.py   |   19 +-
 .../migrations/versions/430039611635_log_more.py   |   12 +-
 .../migrations/versions/43df8de3a5f4_dash_json.py  |    8 +-
 .../4451805bbaa1_remove_double_percents.py         |   30 +-
 .../versions/4500485bde7d_allow_run_sync_async.py  |   13 +-
 superset/migrations/versions/46ba6aaaac97_.py      |    4 +-
 ...8b9b7_remove_coordinator_from_druid_cluster_.py |   23 +-
 superset/migrations/versions/472d2f73dfd4_.py      |    4 +-
 superset/migrations/versions/4736ec66ce19_.py      |  141 +-
 ...08545_migrate_time_range_for_default_filters.py |   60 +-
 superset/migrations/versions/4e6a06bad7a8_init.py  |  370 +--
 .../versions/4fa88fe24e94_owners_many_to_many.py   |   38 +-
 .../versions/525c854f0005_log_this_plus.py         |   12 +-
 .../migrations/versions/55179c7f25c7_sqla_descr.py |    8 +-
 ...826_add_metadata_column_to_annotation_model_.py |    8 +-
 superset/migrations/versions/5a7bad26f2a7_.py      |   12 +-
 superset/migrations/versions/5ccf602336a0_.py      |    4 +-
 .../5e4a03ef0bf0_add_request_access_model.py       |   28 +-
 superset/migrations/versions/6414e83d82b7_.py      |    4 +-
 .../migrations/versions/65903709c321_allow_dml.py  |    8 +-
 .../versions/67a6ac9b727b_update_spatial_params.py |   22 +-
 superset/migrations/versions/705732c70154_.py      |    4 +-
 .../732f1c06bcbf_add_fetch_values_predicate.py     |   19 +-
 .../versions/763d4b211ec9_fixing_audit_fk.py       |  284 ++-
 .../versions/7dbf98566af7_slice_description.py     |   10 +-
 .../versions/7e3ddad2a00b_results_key_to_query.py  |   10 +-
 superset/migrations/versions/7fcdcde0761c_.py      |   28 +-
 .../80a67c5192fa_single_pie_chart_metric.py        |   26 +-
 .../versions/836c0bf75904_cache_timeouts.py        |   22 +-
 ...4f117f9_adding_extra_field_to_database_model.py |    8 +-
 superset/migrations/versions/8e80a26a31db_.py      |   27 +-
 .../versions/956a063c52b3_adjusting_key_length.py  |  184 +-
 superset/migrations/versions/960c69cb1f5b_.py      |   18 +-
 superset/migrations/versions/979c03af3341_.py      |    4 +-
 .../versions/a2d606a761d9_adding_favstar_model.py  |   23 +-
 .../versions/a61b40f9f57f_remove_allow_run_sync.py |   15 +-
 ...a65458420354_add_result_backend_time_logging.py |   14 +-
 .../a6c18f869a4e_query_start_running_time.py       |   12 +-
 ...7c195a_rewriting_url_from_shortner_with_new_.py |   29 +-
 .../a9c47e2c1547_add_impersonate_user_to_dbs.py    |    8 +-
 ...d66c4246e_add_cache_timeout_to_druid_cluster.py |    9 +-
 .../versions/ad4d656d92bc_add_avg_metric.py        |   17 +-
 .../versions/ad82a75afd82_add_query_model.py       |   73 +-
 .../versions/afb7730f6a9c_remove_empty_filters.py  |   13 +-
 ...dfe5fb6c_adding_verbose_name_to_druid_column.py |   10 +-
 superset/migrations/versions/b347b202819b_.py      |    4 +-
 .../b4456560d4f3_change_table_unique_constraint.py |   13 +-
 .../versions/b46fa1b0b39e_add_params_to_tables.py  |   10 +-
 .../bb51420eaf83_add_schema_to_table_model.py      |    8 +-
 .../versions/bcf3126872fc_add_keyvalue.py          |   15 +-
 .../versions/bddc498dd179_adhoc_filters.py         |   14 +-
 .../bebcf3fed1fe_convert_dashboard_v1_positions.py |  472 ++--
 .../bf706ae5eb46_cal_heatmap_metric_to_metrics.py  |   19 +-
 superset/migrations/versions/c18bd4186f15_.py      |    4 +-
 .../c3a8f8611885_materializing_permission.py       |   17 +-
 .../versions/c5756bec8b47_time_grain_sqla.py       |   14 +-
 .../migrations/versions/c611f2b591b8_dim_spec.py   |    8 +-
 superset/migrations/versions/c9495751e314_.py      |    4 +-
 .../versions/ca69c70ec99b_tracking_url.py          |    8 +-
 superset/migrations/versions/d2424a248d63_.py      |    4 +-
 superset/migrations/versions/d39b1e37131d_.py      |    4 +-
 superset/migrations/versions/d6db5a5cdb5d_.py      |    4 +-
 .../versions/d827694c7555_css_templates.py         |   29 +-
 ...d8bc074f7aad_add_new_field_is_restricted_to_.py |   30 +-
 .../db0c65b146bd_update_slice_model_json.py        |   13 +-
 .../versions/db527d8c4c78_add_db_verbose_name.py   |   23 +-
 .../versions/ddd6ebdd853b_annotations.py           |   69 +-
 superset/migrations/versions/e3970889f38e_.py      |    4 +-
 .../versions/e46f2d27a08e_materialize_perms.py     |   19 +-
 .../e502db2af7be_add_template_params_to_tables.py  |    9 +-
 ...8c4473c581_allow_multi_schema_metadata_fetch.py |   10 +-
 .../versions/e866bd2d4976_smaller_grid.py          |   42 +-
 superset/migrations/versions/ea033256294a_.py      |    4 +-
 superset/migrations/versions/ec1f88a35cc6_.py      |    4 +-
 .../eca4694defa7_sqllab_setting_defaults.py        |    8 +-
 superset/migrations/versions/ef8843b41dac_.py      |    4 +-
 ...bf6129e13_adding_verbose_name_to_tablecolumn.py |   12 +-
 .../versions/f162a1dea4c4_d3format_by_metric.py    |   16 +-
 .../f18570e03440_add_query_result_key_index.py     |   10 +-
 superset/migrations/versions/f1f2d4af5b90_.py      |   19 +-
 superset/migrations/versions/f231d82b9b26_.py      |   33 +-
 superset/migrations/versions/f959a6652acd_.py      |    4 +-
 superset/migrations/versions/fc480c87706c_.py      |    4 +-
 superset/migrations/versions/fee7b758c130_.py      |    4 +-
 superset/models/annotations.py                     |   30 +-
 superset/models/core.py                            |  585 +++--
 superset/models/helpers.py                         |  179 +-
 superset/models/sql_lab.py                         |  111 +-
 superset/models/user_attributes.py                 |   12 +-
 superset/security.py                               |  322 ++-
 superset/sql_lab.py                                |  143 +-
 superset/sql_parse.py                              |   55 +-
 superset/stats_logger.py                           |   31 +-
 superset/translations/utils.py                     |    4 +-
 superset/utils/cache.py                            |   17 +-
 superset/utils/core.py                             |  538 ++--
 superset/utils/dashboard_import_export.py          |    9 +-
 superset/utils/dict_import_export.py               |   59 +-
 superset/utils/import_datasource.py                |   30 +-
 superset/views/__init__.py                         |    6 +-
 superset/views/annotations.py                      |   84 +-
 superset/views/api.py                              |    4 +-
 superset/views/base.py                             |  152 +-
 superset/views/core.py                             | 2579 +++++++++++---------
 superset/views/datasource.py                       |   23 +-
 superset/views/sql_lab.py                          |   95 +-
 superset/views/utils.py                            |   33 +-
 superset/viz.py                                    | 1908 +++++++--------
 170 files changed, 9439 insertions(+), 8622 deletions(-)
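
For reference, a tree-wide reformat like the one summarized above is normally produced and verified with Black's command-line interface. The exact invocation is not recorded in this commit, so the commands below are an assumption based on Black's standard usage:

    # assumed invocation -- the commit only states "run black over all .py files"
    pip install black
    black superset/          # rewrite .py files in place
    black --check superset/  # exit non-zero if any file would be reformatted (useful in CI)

The hunks that follow show the characteristic Black changes: double-quoted string literals, long call signatures exploded one argument per line with trailing commas, and blank lines inserted after function-local imports.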

diff --git a/superset/__init__.py b/superset/__init__.py
index 0a267e8..25071cc 100644
--- a/superset/__init__.py
+++ b/superset/__init__.py
@@ -17,15 +17,18 @@ from superset import config
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.security import SupersetSecurityManager
 from superset.utils.core import (
-    get_update_perms_flag, pessimistic_connection_handling, setup_cache)
+    get_update_perms_flag,
+    pessimistic_connection_handling,
+    setup_cache,
+)
 
 APP_DIR = os.path.dirname(__file__)
-CONFIG_MODULE = os.environ.get('SUPERSET_CONFIG', 'superset.config')
+CONFIG_MODULE = os.environ.get("SUPERSET_CONFIG", "superset.config")
 
 if not os.path.exists(config.DATA_DIR):
     os.makedirs(config.DATA_DIR)
 
-with open(APP_DIR + '/static/assets/backendSync.json', 'r') as f:
+with open(APP_DIR + "/static/assets/backendSync.json", "r") as f:
     frontend_config = json.load(f)
 
 app = Flask(__name__)
@@ -35,18 +38,18 @@ conf = app.config
 #################################################################
 # Handling manifest file logic at app start
 #################################################################
-MANIFEST_FILE = APP_DIR + '/static/assets/dist/manifest.json'
+MANIFEST_FILE = APP_DIR + "/static/assets/dist/manifest.json"
 manifest = {}
 
 
 def parse_manifest_json():
     global manifest
     try:
-        with open(MANIFEST_FILE, 'r') as f:
+        with open(MANIFEST_FILE, "r") as f:
             # the manifest inclues non-entry files
             # we only need entries in templates
             full_manifest = json.load(f)
-            manifest = full_manifest.get('entrypoints', {})
+            manifest = full_manifest.get("entrypoints", {})
     except Exception:
         pass
 
@@ -55,14 +58,14 @@ def get_js_manifest_files(filename):
     if app.debug:
         parse_manifest_json()
     entry_files = manifest.get(filename, {})
-    return entry_files.get('js', [])
+    return entry_files.get("js", [])
 
 
 def get_css_manifest_files(filename):
     if app.debug:
         parse_manifest_json()
     entry_files = manifest.get(filename, {})
-    return entry_files.get('css', [])
+    return entry_files.get("css", [])
 
 
 def get_unloaded_chunks(files, loaded_chunks):
@@ -87,16 +90,16 @@ def get_manifest():
 
 #################################################################
 
-for bp in conf.get('BLUEPRINTS'):
+for bp in conf.get("BLUEPRINTS"):
     try:
         print("Registering blueprint: '{}'".format(bp.name))
         app.register_blueprint(bp)
     except Exception as e:
-        print('blueprint registration failed')
+        print("blueprint registration failed")
         logging.exception(e)
 
-if conf.get('SILENCE_FAB'):
-    logging.getLogger('flask_appbuilder').setLevel(logging.ERROR)
+if conf.get("SILENCE_FAB"):
+    logging.getLogger("flask_appbuilder").setLevel(logging.ERROR)
 
 if app.debug:
     app.logger.setLevel(logging.DEBUG)  # pylint: disable=no-member
@@ -104,44 +107,46 @@ else:
     # In production mode, add log handler to sys.stderr.
     app.logger.addHandler(logging.StreamHandler())  # pylint: disable=no-member
     app.logger.setLevel(logging.INFO)  # pylint: disable=no-member
-logging.getLogger('pyhive.presto').setLevel(logging.INFO)
+logging.getLogger("pyhive.presto").setLevel(logging.INFO)
 
 db = SQLA(app)
 
-if conf.get('WTF_CSRF_ENABLED'):
+if conf.get("WTF_CSRF_ENABLED"):
     csrf = CSRFProtect(app)
-    csrf_exempt_list = conf.get('WTF_CSRF_EXEMPT_LIST', [])
+    csrf_exempt_list = conf.get("WTF_CSRF_EXEMPT_LIST", [])
     for ex in csrf_exempt_list:
         csrf.exempt(ex)
 
 pessimistic_connection_handling(db.engine)
 
-cache = setup_cache(app, conf.get('CACHE_CONFIG'))
-tables_cache = setup_cache(app, conf.get('TABLE_NAMES_CACHE_CONFIG'))
+cache = setup_cache(app, conf.get("CACHE_CONFIG"))
+tables_cache = setup_cache(app, conf.get("TABLE_NAMES_CACHE_CONFIG"))
 
-migrate = Migrate(app, db, directory=APP_DIR + '/migrations')
+migrate = Migrate(app, db, directory=APP_DIR + "/migrations")
 
 # Logging configuration
-logging.basicConfig(format=app.config.get('LOG_FORMAT'))
-logging.getLogger().setLevel(app.config.get('LOG_LEVEL'))
+logging.basicConfig(format=app.config.get("LOG_FORMAT"))
+logging.getLogger().setLevel(app.config.get("LOG_LEVEL"))
 
-if app.config.get('ENABLE_TIME_ROTATE'):
-    logging.getLogger().setLevel(app.config.get('TIME_ROTATE_LOG_LEVEL'))
+if app.config.get("ENABLE_TIME_ROTATE"):
+    logging.getLogger().setLevel(app.config.get("TIME_ROTATE_LOG_LEVEL"))
     handler = TimedRotatingFileHandler(
-        app.config.get('FILENAME'),
-        when=app.config.get('ROLLOVER'),
-        interval=app.config.get('INTERVAL'),
-        backupCount=app.config.get('BACKUP_COUNT'))
+        app.config.get("FILENAME"),
+        when=app.config.get("ROLLOVER"),
+        interval=app.config.get("INTERVAL"),
+        backupCount=app.config.get("BACKUP_COUNT"),
+    )
     logging.getLogger().addHandler(handler)
 
-if app.config.get('ENABLE_CORS'):
+if app.config.get("ENABLE_CORS"):
     from flask_cors import CORS
-    CORS(app, **app.config.get('CORS_OPTIONS'))
 
-if app.config.get('ENABLE_PROXY_FIX'):
+    CORS(app, **app.config.get("CORS_OPTIONS"))
+
+if app.config.get("ENABLE_PROXY_FIX"):
     app.wsgi_app = ProxyFix(app.wsgi_app)
 
-if app.config.get('ENABLE_CHUNK_ENCODING'):
+if app.config.get("ENABLE_CHUNK_ENCODING"):
 
     class ChunkedEncodingFix(object):
         def __init__(self, app):
@@ -150,39 +155,40 @@ if app.config.get('ENABLE_CHUNK_ENCODING'):
         def __call__(self, environ, start_response):
             # Setting wsgi.input_terminated tells werkzeug.wsgi to ignore
             # content-length and read the stream till the end.
-            if environ.get('HTTP_TRANSFER_ENCODING', '').lower() == u'chunked':
-                environ['wsgi.input_terminated'] = True
+            if environ.get("HTTP_TRANSFER_ENCODING", "").lower() == u"chunked":
+                environ["wsgi.input_terminated"] = True
             return self.app(environ, start_response)
 
     app.wsgi_app = ChunkedEncodingFix(app.wsgi_app)
 
-if app.config.get('UPLOAD_FOLDER'):
+if app.config.get("UPLOAD_FOLDER"):
     try:
-        os.makedirs(app.config.get('UPLOAD_FOLDER'))
+        os.makedirs(app.config.get("UPLOAD_FOLDER"))
     except OSError:
         pass
 
-for middleware in app.config.get('ADDITIONAL_MIDDLEWARE'):
+for middleware in app.config.get("ADDITIONAL_MIDDLEWARE"):
     app.wsgi_app = middleware(app.wsgi_app)
 
 
 class MyIndexView(IndexView):
-    @expose('/')
+    @expose("/")
     def index(self):
-        return redirect('/superset/welcome')
+        return redirect("/superset/welcome")
 
 
-custom_sm = app.config.get('CUSTOM_SECURITY_MANAGER') or SupersetSecurityManager
+custom_sm = app.config.get("CUSTOM_SECURITY_MANAGER") or SupersetSecurityManager
 if not issubclass(custom_sm, SupersetSecurityManager):
     raise Exception(
         """Your CUSTOM_SECURITY_MANAGER must now extend SupersetSecurityManager,
          not FAB's security manager.
-         See [4565] in UPDATING.md""")
+         See [4565] in UPDATING.md"""
+    )
 
 appbuilder = AppBuilder(
     app,
     db.session,
-    base_template='superset/base.html',
+    base_template="superset/base.html",
     indexview=MyIndexView,
     security_manager_class=custom_sm,
     update_perms=get_update_perms_flag(),
@@ -190,20 +196,20 @@ appbuilder = AppBuilder(
 
 security_manager = appbuilder.sm
 
-results_backend = app.config.get('RESULTS_BACKEND')
+results_backend = app.config.get("RESULTS_BACKEND")
 
 # Registering sources
-module_datasource_map = app.config.get('DEFAULT_MODULE_DS_MAP')
-module_datasource_map.update(app.config.get('ADDITIONAL_MODULE_DS_MAP'))
+module_datasource_map = app.config.get("DEFAULT_MODULE_DS_MAP")
+module_datasource_map.update(app.config.get("ADDITIONAL_MODULE_DS_MAP"))
 ConnectorRegistry.register_sources(module_datasource_map)
 
 # Flask-Compress
-if conf.get('ENABLE_FLASK_COMPRESS'):
+if conf.get("ENABLE_FLASK_COMPRESS"):
     Compress(app)
 
 # Hook that provides administrators a handle on the Flask APP
 # after initialization
-flask_app_mutator = app.config.get('FLASK_APP_MUTATOR')
+flask_app_mutator = app.config.get("FLASK_APP_MUTATOR")
 if flask_app_mutator:
     flask_app_mutator(app)
 
diff --git a/superset/cli.py b/superset/cli.py
index c6444b1..f418ff1 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -11,11 +11,8 @@ from pathlib2 import Path
 import werkzeug.serving
 import yaml
 
-from superset import (
-    app, data, db, security_manager,
-)
-from superset.utils import (
-    core as utils, dashboard_import_export, dict_import_export)
+from superset import app, data, db, security_manager
+from superset.utils import core as utils, dashboard_import_export, dict_import_export
 
 config = app.config
 celery_app = utils.get_celery_app(config)
@@ -39,16 +36,13 @@ def init():
 
 def debug_run(app, port, use_reloader):
     click.secho(
-        '[DEPRECATED] As of Flask >=1.0.0, this command is no longer '
-        'supported, please use `flask run` instead, as documented in our '
-        'CONTRIBUTING.md',
-        fg='red',
-    )
-    click.secho('[example]', fg='yellow')
-    click.secho(
-        'flask run -p 8080 --with-threads --reload --debugger',
-        fg='green',
+        "[DEPRECATED] As of Flask >=1.0.0, this command is no longer "
+        "supported, please use `flask run` instead, as documented in our "
+        "CONTRIBUTING.md",
+        fg="red",
     )
+    click.secho("[example]", fg="yellow")
+    click.secho("flask run -p 8080 --with-threads --reload --debugger", fg="green")
 
 
 def console_log_run(app, port, use_reloader):
@@ -60,13 +54,13 @@ def console_log_run(app, port, use_reloader):
 
     def run():
         server = pywsgi.WSGIServer(
-            ('0.0.0.0', int(port)),
-            app,
-            handler_class=WebSocketHandler)
+            ("0.0.0.0", int(port)), app, handler_class=WebSocketHandler
+        )
         server.serve_forever()
 
     if use_reloader:
         from gevent import monkey
+
         monkey.patch_all()
         run = werkzeug.serving.run_with_reloader(run)
 
@@ -74,35 +68,68 @@ def console_log_run(app, port, use_reloader):
 
 
 @app.cli.command()
-@click.option('--debug', '-d', is_flag=True, help='Start the web server in debug mode')
-@click.option('--console-log', is_flag=True,
-              help='Create logger that logs to the browser console (implies -d)')
-@click.option('--no-reload', '-n', 'use_reloader', flag_value=False,
-              default=config.get('FLASK_USE_RELOAD'),
-              help='Don\'t use the reloader in debug mode')
-@click.option('--address', '-a', default=config.get('SUPERSET_WEBSERVER_ADDRESS'),
-              help='Specify the address to which to bind the web server')
-@click.option('--port', '-p', default=config.get('SUPERSET_WEBSERVER_PORT'),
-              help='Specify the port on which to run the web server')
-@click.option('--workers', '-w', default=config.get('SUPERSET_WORKERS', 2),
-              help='Number of gunicorn web server workers to fire up [DEPRECATED]')
-@click.option('--timeout', '-t', default=config.get('SUPERSET_WEBSERVER_TIMEOUT'),
-              help='Specify the timeout (seconds) for the '
-                   'gunicorn web server [DEPRECATED]')
-@click.option('--socket', '-s', default=config.get('SUPERSET_WEBSERVER_SOCKET'),
-              help='Path to a UNIX socket as an alternative to address:port, e.g. '
-                   '/var/run/superset.sock. '
-                   'Will override the address and port values. [DEPRECATED]')
-def runserver(debug, console_log, use_reloader, address, port, timeout, workers, socket):
+@click.option("--debug", "-d", is_flag=True, help="Start the web server in debug mode")
+@click.option(
+    "--console-log",
+    is_flag=True,
+    help="Create logger that logs to the browser console (implies -d)",
+)
+@click.option(
+    "--no-reload",
+    "-n",
+    "use_reloader",
+    flag_value=False,
+    default=config.get("FLASK_USE_RELOAD"),
+    help="Don't use the reloader in debug mode",
+)
+@click.option(
+    "--address",
+    "-a",
+    default=config.get("SUPERSET_WEBSERVER_ADDRESS"),
+    help="Specify the address to which to bind the web server",
+)
+@click.option(
+    "--port",
+    "-p",
+    default=config.get("SUPERSET_WEBSERVER_PORT"),
+    help="Specify the port on which to run the web server",
+)
+@click.option(
+    "--workers",
+    "-w",
+    default=config.get("SUPERSET_WORKERS", 2),
+    help="Number of gunicorn web server workers to fire up [DEPRECATED]",
+)
+@click.option(
+    "--timeout",
+    "-t",
+    default=config.get("SUPERSET_WEBSERVER_TIMEOUT"),
+    help="Specify the timeout (seconds) for the " "gunicorn web server [DEPRECATED]",
+)
+@click.option(
+    "--socket",
+    "-s",
+    default=config.get("SUPERSET_WEBSERVER_SOCKET"),
+    help="Path to a UNIX socket as an alternative to address:port, e.g. "
+    "/var/run/superset.sock. "
+    "Will override the address and port values. [DEPRECATED]",
+)
+def runserver(
+    debug, console_log, use_reloader, address, port, timeout, workers, socket
+):
     """Starts a Superset web server."""
-    debug = debug or config.get('DEBUG') or console_log
+    debug = debug or config.get("DEBUG") or console_log
     if debug:
-        print(Fore.BLUE + '-=' * 20)
+        print(Fore.BLUE + "-=" * 20)
         print(
-            Fore.YELLOW + 'Starting Superset server in ' +
-            Fore.RED + 'DEBUG' +
-            Fore.YELLOW + ' mode')
-        print(Fore.BLUE + '-=' * 20)
+            Fore.YELLOW
+            + "Starting Superset server in "
+            + Fore.RED
+            + "DEBUG"
+            + Fore.YELLOW
+            + " mode"
+        )
+        print(Fore.BLUE + "-=" * 20)
         print(Style.RESET_ALL)
         if console_log:
             console_log_run(app, port, use_reloader)
@@ -111,129 +138,138 @@ def runserver(debug, console_log, use_reloader, address, port, timeout, workers,
     else:
         logging.info(
             "The Gunicorn 'superset runserver' command is deprecated. Please "
-            "use the 'gunicorn' command instead.")
-        addr_str = ' unix:{socket} ' if socket else' {address}:{port} '
+            "use the 'gunicorn' command instead."
+        )
+        addr_str = " unix:{socket} " if socket else " {address}:{port} "
         cmd = (
-            'gunicorn '
-            f'-w {workers} '
-            f'--timeout {timeout} '
-            f'-b {addr_str} '
-            '--limit-request-line 0 '
-            '--limit-request-field_size 0 '
-            'superset:app'
+            "gunicorn "
+            f"-w {workers} "
+            f"--timeout {timeout} "
+            f"-b {addr_str} "
+            "--limit-request-line 0 "
+            "--limit-request-field_size 0 "
+            "superset:app"
         )
-        print(Fore.GREEN + 'Starting server with command: ')
+        print(Fore.GREEN + "Starting server with command: ")
         print(Fore.YELLOW + cmd)
         print(Style.RESET_ALL)
         Popen(cmd, shell=True).wait()
 
 
 @app.cli.command()
-@click.option('--verbose', '-v', is_flag=True, help='Show extra information')
+@click.option("--verbose", "-v", is_flag=True, help="Show extra information")
 def version(verbose):
     """Prints the current version number"""
-    print(Fore.BLUE + '-=' * 15)
-    print(Fore.YELLOW + 'Superset ' + Fore.CYAN + '{version}'.format(
-        version=config.get('VERSION_STRING')))
-    print(Fore.BLUE + '-=' * 15)
+    print(Fore.BLUE + "-=" * 15)
+    print(
+        Fore.YELLOW
+        + "Superset "
+        + Fore.CYAN
+        + "{version}".format(version=config.get("VERSION_STRING"))
+    )
+    print(Fore.BLUE + "-=" * 15)
     if verbose:
-        print('[DB] : ' + '{}'.format(db.engine))
+        print("[DB] : " + "{}".format(db.engine))
     print(Style.RESET_ALL)
 
 
 def load_examples_run(load_test_data):
-    print('Loading examples into {}'.format(db))
+    print("Loading examples into {}".format(db))
 
     data.load_css_templates()
 
-    print('Loading energy related dataset')
+    print("Loading energy related dataset")
     data.load_energy()
 
     print("Loading [World Bank's Health Nutrition and Population Stats]")
     data.load_world_bank_health_n_pop()
 
-    print('Loading [Birth names]')
+    print("Loading [Birth names]")
     data.load_birth_names()
 
-    print('Loading [Unicode test data]')
+    print("Loading [Unicode test data]")
     data.load_unicode_test_data()
 
     if not load_test_data:
-        print('Loading [Random time series data]')
+        print("Loading [Random time series data]")
         data.load_random_time_series_data()
 
-        print('Loading [Random long/lat data]')
+        print("Loading [Random long/lat data]")
         data.load_long_lat_data()
 
-        print('Loading [Country Map data]')
+        print("Loading [Country Map data]")
         data.load_country_map_data()
 
-        print('Loading [Multiformat time series]')
+        print("Loading [Multiformat time series]")
         data.load_multiformat_time_series()
 
-        print('Loading [Paris GeoJson]')
+        print("Loading [Paris GeoJson]")
         data.load_paris_iris_geojson()
 
-        print('Loading [San Francisco population polygons]')
+        print("Loading [San Francisco population polygons]")
         data.load_sf_population_polygons()
 
-        print('Loading [Flights data]')
+        print("Loading [Flights data]")
         data.load_flights()
 
-        print('Loading [BART lines]')
+        print("Loading [BART lines]")
         data.load_bart_lines()
 
-        print('Loading [Multi Line]')
+        print("Loading [Multi Line]")
         data.load_multi_line()
 
-        print('Loading [Misc Charts] dashboard')
+        print("Loading [Misc Charts] dashboard")
         data.load_misc_dashboard()
 
-        print('Loading DECK.gl demo')
+        print("Loading DECK.gl demo")
         data.load_deck_dash()
 
 
 @app.cli.command()
-@click.option('--load-test-data', '-t', is_flag=True, help='Load additional test data')
+@click.option("--load-test-data", "-t", is_flag=True, help="Load additional test data")
 def load_examples(load_test_data):
     """Loads a set of Slices and Dashboards and a supporting dataset """
     load_examples_run(load_test_data)
 
 
 @app.cli.command()
-@click.option('--datasource', '-d', help='Specify which datasource name to load, if '
-                                         'omitted, all datasources will be refreshed')
-@click.option('--merge', '-m', is_flag=True, default=False,
-              help='Specify using \'merge\' property during operation. '
-                   'Default value is False.')
+@click.option(
+    "--datasource",
+    "-d",
+    help="Specify which datasource name to load, if "
+    "omitted, all datasources will be refreshed",
+)
+@click.option(
+    "--merge",
+    "-m",
+    is_flag=True,
+    default=False,
+    help="Specify using 'merge' property during operation. " "Default value is False.",
+)
 def refresh_druid(datasource, merge):
     """Refresh druid datasources"""
     session = db.session()
     from superset.connectors.druid.models import DruidCluster
+
     for cluster in session.query(DruidCluster).all():
         try:
-            cluster.refresh_datasources(datasource_name=datasource,
-                                        merge_flag=merge)
+            cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
         except Exception as e:
-            print(
-                "Error while processing cluster '{}'\n{}".format(
-                    cluster, str(e)))
+            print("Error while processing cluster '{}'\n{}".format(cluster, str(e)))
             logging.exception(e)
         cluster.metadata_last_refreshed = datetime.now()
-        print(
-            'Refreshed metadata from cluster '
-            '[' + cluster.cluster_name + ']')
+        print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
     session.commit()
 
 
 @app.cli.command()
 @click.option(
-    '--path', '-p',
-    help='Path to a single JSON file or path containing multiple JSON files'
-         'files to import (*.json)')
-@click.option(
-    '--recursive', '-r',
-    help='recursively search the path for json files')
+    "--path",
+    "-p",
+    help="Path to a single JSON file or path containing multiple JSON files"
+    "files to import (*.json)",
+)
+@click.option("--recursive", "-r", help="recursively search the path for json files")
 def import_dashboards(path, recursive=False):
     """Import dashboards from JSON"""
     p = Path(path)
@@ -241,114 +277,107 @@ def import_dashboards(path, recursive=False):
     if p.is_file():
         files.append(p)
     elif p.exists() and not recursive:
-        files.extend(p.glob('*.json'))
+        files.extend(p.glob("*.json"))
     elif p.exists() and recursive:
-        files.extend(p.rglob('*.json'))
+        files.extend(p.rglob("*.json"))
     for f in files:
-        logging.info('Importing dashboard from file %s', f)
+        logging.info("Importing dashboard from file %s", f)
         try:
             with f.open() as data_stream:
-                dashboard_import_export.import_dashboards(
-                    db.session, data_stream)
+                dashboard_import_export.import_dashboards(db.session, data_stream)
         except Exception as e:
-            logging.error('Error when importing dashboard from file %s', f)
+            logging.error("Error when importing dashboard from file %s", f)
             logging.error(e)
 
 
 @app.cli.command()
 @click.option(
-    '--dashboard-file', '-f', default=None,
-    help='Specify the the file to export to')
-@click.option(
-    '--print_stdout', '-p',
-    help='Print JSON to stdout')
+    "--dashboard-file", "-f", default=None, help="Specify the the file to export to"
+)
+@click.option("--print_stdout", "-p", help="Print JSON to stdout")
 def export_dashboards(print_stdout, dashboard_file):
     """Export dashboards to JSON"""
     data = dashboard_import_export.export_dashboards(db.session)
     if print_stdout or not dashboard_file:
         print(data)
     if dashboard_file:
-        logging.info('Exporting dashboards to %s', dashboard_file)
-        with open(dashboard_file, 'w') as data_stream:
+        logging.info("Exporting dashboards to %s", dashboard_file)
+        with open(dashboard_file, "w") as data_stream:
             data_stream.write(data)
 
 
 @app.cli.command()
 @click.option(
-    '--path', '-p',
-    help='Path to a single YAML file or path containing multiple YAML '
-         'files to import (*.yaml or *.yml)')
-@click.option(
-    '--sync', '-s', 'sync', default='',
-    help='comma seperated list of element types to synchronize '
-         'e.g. "metrics,columns" deletes metrics and columns in the DB '
-         'that are not specified in the YAML file')
+    "--path",
+    "-p",
+    help="Path to a single YAML file or path containing multiple YAML "
+    "files to import (*.yaml or *.yml)",
+)
 @click.option(
-    '--recursive', '-r',
-    help='recursively search the path for yaml files')
+    "--sync",
+    "-s",
+    "sync",
+    default="",
+    help="comma seperated list of element types to synchronize "
+    'e.g. "metrics,columns" deletes metrics and columns in the DB '
+    "that are not specified in the YAML file",
+)
+@click.option("--recursive", "-r", help="recursively search the path for yaml files")
 def import_datasources(path, sync, recursive=False):
     """Import datasources from YAML"""
-    sync_array = sync.split(',')
+    sync_array = sync.split(",")
     p = Path(path)
     files = []
     if p.is_file():
         files.append(p)
     elif p.exists() and not recursive:
-        files.extend(p.glob('*.yaml'))
-        files.extend(p.glob('*.yml'))
+        files.extend(p.glob("*.yaml"))
+        files.extend(p.glob("*.yml"))
     elif p.exists() and recursive:
-        files.extend(p.rglob('*.yaml'))
-        files.extend(p.rglob('*.yml'))
+        files.extend(p.rglob("*.yaml"))
+        files.extend(p.rglob("*.yml"))
     for f in files:
-        logging.info('Importing datasources from file %s', f)
+        logging.info("Importing datasources from file %s", f)
         try:
             with f.open() as data_stream:
                 dict_import_export.import_from_dict(
-                    db.session,
-                    yaml.safe_load(data_stream),
-                    sync=sync_array)
+                    db.session, yaml.safe_load(data_stream), sync=sync_array
+                )
         except Exception as e:
-            logging.error('Error when importing datasources from file %s', f)
+            logging.error("Error when importing datasources from file %s", f)
             logging.error(e)
 
 
 @app.cli.command()
 @click.option(
-    '--datasource-file', '-f', default=None,
-    help='Specify the the file to export to')
-@click.option(
-    '--print_stdout', '-p',
-    help='Print YAML to stdout')
-@click.option(
-    '--back-references', '-b',
-    help='Include parent back references')
-@click.option(
-    '--include-defaults', '-d',
-    help='Include fields containing defaults')
-def export_datasources(print_stdout, datasource_file,
-                       back_references, include_defaults):
+    "--datasource-file", "-f", default=None, help="Specify the the file to export to"
+)
+@click.option("--print_stdout", "-p", help="Print YAML to stdout")
+@click.option("--back-references", "-b", help="Include parent back references")
+@click.option("--include-defaults", "-d", help="Include fields containing defaults")
+def export_datasources(
+    print_stdout, datasource_file, back_references, include_defaults
+):
     """Export datasources to YAML"""
     data = dict_import_export.export_to_dict(
         session=db.session,
         recursive=True,
         back_references=back_references,
-        include_defaults=include_defaults)
+        include_defaults=include_defaults,
+    )
     if print_stdout or not datasource_file:
         yaml.safe_dump(data, stdout, default_flow_style=False)
     if datasource_file:
-        logging.info('Exporting datasources to %s', datasource_file)
-        with open(datasource_file, 'w') as data_stream:
+        logging.info("Exporting datasources to %s", datasource_file)
+        with open(datasource_file, "w") as data_stream:
             yaml.safe_dump(data, data_stream, default_flow_style=False)
 
 
 @app.cli.command()
-@click.option(
-    '--back-references', '-b',
-    help='Include parent back references')
+@click.option("--back-references", "-b", help="Include parent back references")
 def export_datasource_schema(back_references):
     """Export datasource YAML schema to stdout"""
-    data = dict_import_export.export_schema_to_dict(
-        back_references=back_references)
+    data = dict_import_export.export_schema_to_dict(back_references=back_references)
     yaml.safe_dump(data, stdout, default_flow_style=False)
 
 
@@ -356,47 +385,49 @@ def export_datasource_schema(back_references):
 def update_datasources_cache():
     """Refresh sqllab datasources cache"""
     from superset.models.core import Database
+
     for database in db.session.query(Database).all():
         if database.allow_multi_schema_metadata_fetch:
-            print('Fetching {} datasources ...'.format(database.name))
+            print("Fetching {} datasources ...".format(database.name))
             try:
                 database.all_table_names_in_database(
-                    force=True, cache=True, cache_timeout=24 * 60 * 60)
+                    force=True, cache=True, cache_timeout=24 * 60 * 60
+                )
                 database.all_view_names_in_database(
-                    force=True, cache=True, cache_timeout=24 * 60 * 60)
+                    force=True, cache=True, cache_timeout=24 * 60 * 60
+                )
             except Exception as e:
-                print('{}'.format(str(e)))
+                print("{}".format(str(e)))
 
 
 @app.cli.command()
 @click.option(
-    '--workers', '-w',
-    type=int,
-    help='Number of celery server workers to fire up')
+    "--workers", "-w", type=int, help="Number of celery server workers to fire up"
+)
 def worker(workers):
     """Starts a Superset worker for async SQL query execution."""
     logging.info(
         "The 'superset worker' command is deprecated. Please use the 'celery "
-        "worker' command instead.")
+        "worker' command instead."
+    )
     if workers:
         celery_app.conf.update(CELERYD_CONCURRENCY=workers)
-    elif config.get('SUPERSET_CELERY_WORKERS'):
+    elif config.get("SUPERSET_CELERY_WORKERS"):
         celery_app.conf.update(
-            CELERYD_CONCURRENCY=config.get('SUPERSET_CELERY_WORKERS'))
+            CELERYD_CONCURRENCY=config.get("SUPERSET_CELERY_WORKERS")
+        )
 
-    worker = celery_app.Worker(optimization='fair')
+    worker = celery_app.Worker(optimization="fair")
     worker.start()
 
 
 @app.cli.command()
 @click.option(
-    '-p', '--port',
-    default='5555',
-    help='Port on which to start the Flower process')
+    "-p", "--port", default="5555", help="Port on which to start the Flower process"
+)
 @click.option(
-    '-a', '--address',
-    default='localhost',
-    help='Address on which to run the service')
+    "-a", "--address", default="localhost", help="Address on which to run the service"
+)
 def flower(port, address):
     """Runs a Celery Flower web server
 
@@ -404,18 +435,19 @@ def flower(port, address):
     broker"""
     BROKER_URL = celery_app.conf.BROKER_URL
     cmd = (
-        'celery flower '
-        f'--broker={BROKER_URL} '
-        f'--port={port} '
-        f'--address={address} '
+        "celery flower "
+        f"--broker={BROKER_URL} "
+        f"--port={port} "
+        f"--address={address} "
     )
     logging.info(
         "The 'superset flower' command is deprecated. Please use the 'celery "
-        "flower' command instead.")
-    print(Fore.GREEN + 'Starting a Celery Flower instance')
-    print(Fore.BLUE + '-=' * 40)
+        "flower' command instead."
+    )
+    print(Fore.GREEN + "Starting a Celery Flower instance")
+    print(Fore.BLUE + "-=" * 40)
     print(Fore.YELLOW + cmd)
-    print(Fore.BLUE + '-=' * 40)
+    print(Fore.BLUE + "-=" * 40)
     Popen(cmd, shell=True).wait()
 
 
@@ -426,7 +458,7 @@ def load_test_users():
 
     Syncs permissions for those users/roles
     """
-    print(Fore.GREEN + 'Loading a set of users for unit tests')
+    print(Fore.GREEN + "Loading a set of users for unit tests")
     load_test_users_run()
 
 
@@ -436,51 +468,73 @@ def load_test_users_run():
 
     Syncs permissions for those users/roles
     """
-    if config.get('TESTING'):
+    if config.get("TESTING"):
         security_manager.sync_role_definitions()
-        gamma_sqllab_role = security_manager.add_role('gamma_sqllab')
-        for perm in security_manager.find_role('Gamma').permissions:
+        gamma_sqllab_role = security_manager.add_role("gamma_sqllab")
+        for perm in security_manager.find_role("Gamma").permissions:
             security_manager.add_permission_role(gamma_sqllab_role, perm)
         utils.get_or_create_main_db()
         db_perm = utils.get_main_database(security_manager.get_session).perm
-        security_manager.merge_perm('database_access', db_perm)
+        security_manager.merge_perm("database_access", db_perm)
         db_pvm = security_manager.find_permission_view_menu(
-            view_menu_name=db_perm, permission_name='database_access')
+            view_menu_name=db_perm, permission_name="database_access"
+        )
         gamma_sqllab_role.permissions.append(db_pvm)
-        for perm in security_manager.find_role('sql_lab').permissions:
+        for perm in security_manager.find_role("sql_lab").permissions:
             security_manager.add_permission_role(gamma_sqllab_role, perm)
 
-        admin = security_manager.find_user('admin')
+        admin = security_manager.find_user("admin")
         if not admin:
             security_manager.add_user(
-                'admin', 'admin', ' user', 'admin@fab.org',
-                security_manager.find_role('Admin'),
-                password='general')
-
-        gamma = security_manager.find_user('gamma')
+                "admin",
+                "admin",
+                " user",
+                "admin@fab.org",
+                security_manager.find_role("Admin"),
+                password="general",
+            )
+
+        gamma = security_manager.find_user("gamma")
         if not gamma:
             security_manager.add_user(
-                'gamma', 'gamma', 'user', 'gamma@fab.org',
-                security_manager.find_role('Gamma'),
-                password='general')
-
-        gamma2 = security_manager.find_user('gamma2')
+                "gamma",
+                "gamma",
+                "user",
+                "gamma@fab.org",
+                security_manager.find_role("Gamma"),
+                password="general",
+            )
+
+        gamma2 = security_manager.find_user("gamma2")
         if not gamma2:
             security_manager.add_user(
-                'gamma2', 'gamma2', 'user', 'gamma2@fab.org',
-                security_manager.find_role('Gamma'),
-                password='general')
-
-        gamma_sqllab_user = security_manager.find_user('gamma_sqllab')
+                "gamma2",
+                "gamma2",
+                "user",
+                "gamma2@fab.org",
+                security_manager.find_role("Gamma"),
+                password="general",
+            )
+
+        gamma_sqllab_user = security_manager.find_user("gamma_sqllab")
         if not gamma_sqllab_user:
             security_manager.add_user(
-                'gamma_sqllab', 'gamma_sqllab', 'user', 'gamma_sqllab@fab.org',
-                gamma_sqllab_role, password='general')
-
-        alpha = security_manager.find_user('alpha')
+                "gamma_sqllab",
+                "gamma_sqllab",
+                "user",
+                "gamma_sqllab@fab.org",
+                gamma_sqllab_role,
+                password="general",
+            )
+
+        alpha = security_manager.find_user("alpha")
         if not alpha:
             security_manager.add_user(
-                'alpha', 'alpha', 'user', 'alpha@fab.org',
-                security_manager.find_role('Alpha'),
-                password='general')
+                "alpha",
+                "alpha",
+                "user",
+                "alpha@fab.org",
+                security_manager.find_role("Alpha"),
+                password="general",
+            )
         security_manager.get_session.commit()
diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index 21b0dac..264b94c 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -11,16 +11,13 @@ class QueryContext:
     The query context contains the query object and additional fields necessary
     to retrieve the data payload for a given viz.
     """
+
     # TODO: Type datasource and query_object dictionary with TypedDict when it becomes
     # a vanilla python type https://github.com/python/mypy/issues/5288
-    def __init__(
-            self,
-            datasource: Dict,
-            queries: List[Dict],
-    ):
-        self.datasource = ConnectorRegistry.get_datasource(datasource.get('type'),
-                                                           int(datasource.get('id')),
-                                                           db.session)
+    def __init__(self, datasource: Dict, queries: List[Dict]):
+        self.datasource = ConnectorRegistry.get_datasource(
+            datasource.get("type"), int(datasource.get("id")), db.session
+        )
         self.queries = list(map(lambda query_obj: QueryObject(**query_obj), queries))
 
     def get_data(self):
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index 8116d26..aef88de 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -14,20 +14,21 @@ class QueryObject:
     The query object's schema matches the interfaces of DB connectors like sqla
     and druid. The query objects are constructed on the client.
     """
+
     def __init__(
-            self,
-            granularity: str,
-            groupby: List[str] = None,
-            metrics: List[Metric] = None,
-            filters: List[str] = None,
-            time_range: Optional[str] = None,
-            time_shift: Optional[str] = None,
-            is_timeseries: bool = False,
-            row_limit: int = app.config.get('ROW_LIMIT'),
-            limit: int = 0,
-            timeseries_limit_metric: Optional[Metric] = None,
-            order_desc: bool = True,
-            extras: Optional[Dict] = None,
+        self,
+        granularity: str,
+        groupby: List[str] = None,
+        metrics: List[Metric] = None,
+        filters: List[str] = None,
+        time_range: Optional[str] = None,
+        time_shift: Optional[str] = None,
+        is_timeseries: bool = False,
+        row_limit: int = app.config.get("ROW_LIMIT"),
+        limit: int = 0,
+        timeseries_limit_metric: Optional[Metric] = None,
+        order_desc: bool = True,
+        extras: Optional[Dict] = None,
     ):
         self.granularity = granularity
         self.from_dttm, self.to_dttm = utils.get_since_until(time_range, time_shift)
diff --git a/superset/config.py b/superset/config.py
index 1613e75..48ee2a3 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -20,18 +20,18 @@ from superset.stats_logger import DummyStatsLogger
 STATS_LOGGER = DummyStatsLogger()
 
 BASE_DIR = os.path.abspath(os.path.dirname(__file__))
-if 'SUPERSET_HOME' in os.environ:
-    DATA_DIR = os.environ['SUPERSET_HOME']
+if "SUPERSET_HOME" in os.environ:
+    DATA_DIR = os.environ["SUPERSET_HOME"]
 else:
-    DATA_DIR = os.path.join(os.path.expanduser('~'), '.superset')
+    DATA_DIR = os.path.join(os.path.expanduser("~"), ".superset")
 
 # ---------------------------------------------------------
 # Superset specific config
 # ---------------------------------------------------------
-PACKAGE_DIR = os.path.join(BASE_DIR, 'static', 'assets')
-PACKAGE_FILE = os.path.join(PACKAGE_DIR, 'package.json')
+PACKAGE_DIR = os.path.join(BASE_DIR, "static", "assets")
+PACKAGE_FILE = os.path.join(PACKAGE_DIR, "package.json")
 with open(PACKAGE_FILE) as package_file:
-    VERSION_STRING = json.load(package_file)['version']
+    VERSION_STRING = json.load(package_file)["version"]
 
 ROW_LIMIT = 50000
 VIZ_ROW_LIMIT = 10000
@@ -40,7 +40,7 @@ FILTER_SELECT_ROW_LIMIT = 10000
 SUPERSET_WORKERS = 2  # deprecated
 SUPERSET_CELERY_WORKERS = 32  # deprecated
 
-SUPERSET_WEBSERVER_ADDRESS = '0.0.0.0'
+SUPERSET_WEBSERVER_ADDRESS = "0.0.0.0"
 SUPERSET_WEBSERVER_PORT = 8088
 SUPERSET_WEBSERVER_TIMEOUT = 60  # deprecated
 SUPERSET_DASHBOARD_POSITION_DATA_LIMIT = 65535
@@ -50,10 +50,10 @@ SQLALCHEMY_TRACK_MODIFICATIONS = False
 # ---------------------------------------------------------
 
 # Your App secret key
-SECRET_KEY = '\2\1thisismyscretkey\1\2\e\y\y\h'  # noqa
+SECRET_KEY = "\2\1thisismyscretkey\1\2\e\y\y\h"  # noqa
 
 # The SQLAlchemy connection string.
-SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(DATA_DIR, 'superset.db')
+SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(DATA_DIR, "superset.db")
 # SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'
 # SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp'
 
@@ -89,10 +89,10 @@ ENABLE_PROXY_FIX = False
 # GLOBALS FOR APP Builder
 # ------------------------------
 # Uncomment to setup Your App name
-APP_NAME = 'Superset'
+APP_NAME = "Superset"
 
 # Uncomment to setup an App icon
-APP_ICON = '/static/assets/images/superset-logo@2x.png'
+APP_ICON = "/static/assets/images/superset-logo@2x.png"
 
 # Druid query timezone
 # tz.tzutc() : Using utc timezone
@@ -103,7 +103,7 @@ APP_ICON = '/static/assets/images/superset-logo@2x.png'
 # other tz can be overridden by providing a local_config
 DRUID_IS_ACTIVE = True
 DRUID_TZ = tz.tzutc()
-DRUID_ANALYSIS_TYPES = ['cardinality']
+DRUID_ANALYSIS_TYPES = ["cardinality"]
 
 # ----------------------------------------------------
 # AUTHENTICATION CONFIG
@@ -149,20 +149,20 @@ PUBLIC_ROLE_LIKE_GAMMA = False
 # Babel config for translations
 # ---------------------------------------------------
 # Setup default language
-BABEL_DEFAULT_LOCALE = 'en'
+BABEL_DEFAULT_LOCALE = "en"
 # Your application default translation path
-BABEL_DEFAULT_FOLDER = 'superset/translations'
+BABEL_DEFAULT_FOLDER = "superset/translations"
 # The allowed translation for you app
 LANGUAGES = {
-    'en': {'flag': 'us', 'name': 'English'},
-    'it': {'flag': 'it', 'name': 'Italian'},
-    'fr': {'flag': 'fr', 'name': 'French'},
-    'zh': {'flag': 'cn', 'name': 'Chinese'},
-    'ja': {'flag': 'jp', 'name': 'Japanese'},
-    'de': {'flag': 'de', 'name': 'German'},
-    'pt': {'flag': 'pt', 'name': 'Portuguese'},
-    'pt_BR': {'flag': 'br', 'name': 'Brazilian Portuguese'},
-    'ru': {'flag': 'ru', 'name': 'Russian'},
+    "en": {"flag": "us", "name": "English"},
+    "it": {"flag": "it", "name": "Italian"},
+    "fr": {"flag": "fr", "name": "French"},
+    "zh": {"flag": "cn", "name": "Chinese"},
+    "ja": {"flag": "jp", "name": "Japanese"},
+    "de": {"flag": "de", "name": "German"},
+    "pt": {"flag": "pt", "name": "Portuguese"},
+    "pt_BR": {"flag": "br", "name": "Brazilian Portuguese"},
+    "ru": {"flag": "ru", "name": "Russian"},
 }
 
 # ---------------------------------------------------
@@ -176,19 +176,19 @@ FEATURE_FLAGS = {}
 # Image and file configuration
 # ---------------------------------------------------
 # The file upload folder, when using models with files
-UPLOAD_FOLDER = BASE_DIR + '/app/static/uploads/'
+UPLOAD_FOLDER = BASE_DIR + "/app/static/uploads/"
 
 # The image upload folder, when using models with images
-IMG_UPLOAD_FOLDER = BASE_DIR + '/app/static/uploads/'
+IMG_UPLOAD_FOLDER = BASE_DIR + "/app/static/uploads/"
 
 # The image upload url, when using models with images
-IMG_UPLOAD_URL = '/static/uploads/'
+IMG_UPLOAD_URL = "/static/uploads/"
 # Setup image size default is (300, 200, True)
 # IMG_SIZE = (300, 200, True)
 
 CACHE_DEFAULT_TIMEOUT = 60 * 60 * 24
-CACHE_CONFIG = {'CACHE_TYPE': 'null'}
-TABLE_NAMES_CACHE_CONFIG = {'CACHE_TYPE': 'null'}
+CACHE_CONFIG = {"CACHE_TYPE": "null"}
+TABLE_NAMES_CACHE_CONFIG = {"CACHE_TYPE": "null"}
 
 # CORS Options
 ENABLE_CORS = False
@@ -203,13 +203,11 @@ SUPERSET_WEBSERVER_DOMAINS = None
 
 # Allowed format types for upload on Database view
 # TODO: Add processing of other spreadsheet formats (xls, xlsx etc)
-ALLOWED_EXTENSIONS = set(['csv'])
+ALLOWED_EXTENSIONS = set(["csv"])
 
 # CSV Options: key/value pairs that will be passed as argument to DataFrame.to_csv method
 # note: index option should not be overridden
-CSV_EXPORT = {
-    'encoding': 'utf-8',
-}
+CSV_EXPORT = {"encoding": "utf-8"}
 
 # ---------------------------------------------------
 # Time grain configurations
@@ -252,10 +250,12 @@ DRUID_DATA_SOURCE_BLACKLIST = []
 # --------------------------------------------------
 # Modules, datasources and middleware to be registered
 # --------------------------------------------------
-DEFAULT_MODULE_DS_MAP = OrderedDict([
-    ('superset.connectors.sqla.models', ['SqlaTable']),
-    ('superset.connectors.druid.models', ['DruidDatasource']),
-])
+DEFAULT_MODULE_DS_MAP = OrderedDict(
+    [
+        ("superset.connectors.sqla.models", ["SqlaTable"]),
+        ("superset.connectors.druid.models", ["DruidDatasource"]),
+    ]
+)
 ADDITIONAL_MODULE_DS_MAP = {}
 ADDITIONAL_MIDDLEWARE = []
 
@@ -266,8 +266,8 @@ ADDITIONAL_MIDDLEWARE = []
 
 # Console Log Settings
 
-LOG_FORMAT = '%(asctime)s:%(levelname)s:%(name)s:%(message)s'
-LOG_LEVEL = 'DEBUG'
+LOG_FORMAT = "%(asctime)s:%(levelname)s:%(name)s:%(message)s"
+LOG_LEVEL = "DEBUG"
 
 # ---------------------------------------------------
 # Enable Time Rotate Log Handler
@@ -275,9 +275,9 @@ LOG_LEVEL = 'DEBUG'
 # LOG_LEVEL = DEBUG, INFO, WARNING, ERROR, CRITICAL
 
 ENABLE_TIME_ROTATE = False
-TIME_ROTATE_LOG_LEVEL = 'DEBUG'
-FILENAME = os.path.join(DATA_DIR, 'superset.log')
-ROLLOVER = 'midnight'
+TIME_ROTATE_LOG_LEVEL = "DEBUG"
+FILENAME = os.path.join(DATA_DIR, "superset.log")
+ROLLOVER = "midnight"
 INTERVAL = 1
 BACKUP_COUNT = 30
 
@@ -288,7 +288,7 @@ BACKUP_COUNT = 30
 #     pass
 
 # Set this API key to enable Mapbox visualizations
-MAPBOX_API_KEY = os.environ.get('MAPBOX_API_KEY', '')
+MAPBOX_API_KEY = os.environ.get("MAPBOX_API_KEY", "")
 
 # Maximum number of rows returned from a database
 # in async mode, no more than SQL_MAX_ROW will be returned and stored
@@ -326,7 +326,7 @@ CELERY_CONFIG = None
 # static http headers to be served by your Superset server.
 # This header prevents iFrames from other domains and
 # "clickjacking" as a result
-HTTP_HEADERS = {'X-Frame-Options': 'SAMEORIGIN'}
+HTTP_HEADERS = {"X-Frame-Options": "SAMEORIGIN"}
 # If you need to allow iframes from other domains (and are
 # aware of the risks), you can disable this header:
 # HTTP_HEADERS = {}
@@ -355,7 +355,7 @@ CSV_TO_HIVE_UPLOAD_S3_BUCKET = None
 
 # The directory within the bucket specified above that will
 # contain all the external tables
-CSV_TO_HIVE_UPLOAD_DIRECTORY = 'EXTERNAL_HIVE_TABLES/'
+CSV_TO_HIVE_UPLOAD_DIRECTORY = "EXTERNAL_HIVE_TABLES/"
 
 # The namespace within hive where the tables created from
 # uploading CSVs will be stored.
@@ -369,9 +369,9 @@ JINJA_CONTEXT_ADDONS = {}
 
 # Roles that are controlled by the API / Superset and should not be changed
 # by humans.
-ROBOT_PERMISSION_ROLES = ['Public', 'Gamma', 'Alpha', 'Admin', 'sql_lab']
+ROBOT_PERMISSION_ROLES = ["Public", "Gamma", "Alpha", "Admin", "sql_lab"]
 
-CONFIG_PATH_ENV_VAR = 'SUPERSET_CONFIG_PATH'
+CONFIG_PATH_ENV_VAR = "SUPERSET_CONFIG_PATH"
 
 # If a callable is specified, it will be called at app startup while passing
 # a reference to the Flask app. This can be used to alter the Flask app
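
A minimal sketch of such a callable, assuming it is exposed under the FLASK_APP_MUTATOR key (the key name itself falls outside this hunk); the body is purely illustrative:

    # superset_config.py -- illustrative only
    def FLASK_APP_MUTATOR(app):
        # Called once at startup with the Flask app instance; mutate it in place.
        app.config["PROPAGATE_EXCEPTIONS"] = True
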
@@ -385,16 +385,16 @@ ENABLE_ACCESS_REQUEST = False
 
 # smtp server configuration
 EMAIL_NOTIFICATIONS = False  # all the emails are sent using dryrun
-SMTP_HOST = 'localhost'
+SMTP_HOST = "localhost"
 SMTP_STARTTLS = True
 SMTP_SSL = False
-SMTP_USER = 'superset'
+SMTP_USER = "superset"
 SMTP_PORT = 25
-SMTP_PASSWORD = 'superset'
-SMTP_MAIL_FROM = 'superset@superset.com'
+SMTP_PASSWORD = "superset"
+SMTP_MAIL_FROM = "superset@superset.com"
 
 if not CACHE_DEFAULT_TIMEOUT:
-    CACHE_DEFAULT_TIMEOUT = CACHE_CONFIG.get('CACHE_DEFAULT_TIMEOUT')
+    CACHE_DEFAULT_TIMEOUT = CACHE_CONFIG.get("CACHE_DEFAULT_TIMEOUT")
 
 # Whether to bump the logging level to ERROR on the flask_appbuilder package
 # Set to False if/when debugging FAB related issues like
@@ -403,14 +403,14 @@ SILENCE_FAB = True
 
 # The link to a page containing common errors and their resolutions
 # It will be appended at the bottom of sql_lab errors.
-TROUBLESHOOTING_LINK = ''
+TROUBLESHOOTING_LINK = ""
 
 # CSRF token timeout, set to None for a token that never expires
 WTF_CSRF_TIME_LIMIT = 60 * 60 * 24 * 7
 
 # This link should lead to a page with instructions on how to gain access to a
 # Datasource. It will be placed at the bottom of permissions errors.
-PERMISSION_INSTRUCTIONS_LINK = ''
+PERMISSION_INSTRUCTIONS_LINK = ""
 
 # Integrate external Blueprints to the app by passing them to your
 # configuration. These blueprints will get integrated in the app
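
A hedged example of registering such a Blueprint, assuming the configuration hook is a list named BLUEPRINTS (the variable name itself falls outside this hunk); the route is a placeholder:

    # superset_config.py -- illustrative only
    from flask import Blueprint

    extra_bp = Blueprint("extra_bp", __name__, url_prefix="/extra")

    @extra_bp.route("/ping")
    def ping():
        return "pong"

    BLUEPRINTS = [extra_bp]
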
@@ -467,12 +467,15 @@ try:
     if CONFIG_PATH_ENV_VAR in os.environ:
         # Explicitly import a config module that is not on the pythonpath; useful
         # for the case where the app is being executed via pex.
-        print('Loaded your LOCAL configuration at [{}]'.format(
-            os.environ[CONFIG_PATH_ENV_VAR]))
+        print(
+            "Loaded your LOCAL configuration at [{}]".format(
+                os.environ[CONFIG_PATH_ENV_VAR]
+            )
+        )
         module = sys.modules[__name__]
         override_conf = imp.load_source(
-            'superset_config',
-            os.environ[CONFIG_PATH_ENV_VAR])
+            "superset_config", os.environ[CONFIG_PATH_ENV_VAR]
+        )
         for key in dir(override_conf):
             if key.isupper():
                 setattr(module, key, getattr(override_conf, key))
@@ -480,7 +483,9 @@ try:
     else:
         from superset_config import *  # noqa
         import superset_config
-        print('Loaded your LOCAL configuration at [{}]'.format(
-            superset_config.__file__))
+
+        print(
+            "Loaded your LOCAL configuration at [{}]".format(superset_config.__file__)
+        )
 except ImportError:
     pass
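
To illustrate the two load paths above, a sketch of pointing Superset at an out-of-pythonpath config file via the environment variable; the path and values are placeholders:

    # /etc/superset/superset_config.py (hypothetical), loaded by the env-var branch:
    #   export SUPERSET_CONFIG_PATH=/etc/superset/superset_config.py
    SQLALCHEMY_DATABASE_URI = "postgresql://superset:superset@localhost/superset"
    SECRET_KEY = "change-me"  # placeholder; any uppercase name here overrides config.py
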
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index 216ed9e..51e2523 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -2,9 +2,7 @@
 import json
 
 from past.builtins import basestring
-from sqlalchemy import (
-    and_, Boolean, Column, Integer, String, Text,
-)
+from sqlalchemy import and_, Boolean, Column, Integer, String, Text
 from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.orm import foreign, relationship
 
@@ -51,7 +49,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
     @declared_attr
     def slices(self):
         return relationship(
-            'Slice',
+            "Slice",
             primaryjoin=lambda: and_(
                 foreign(Slice.datasource_id) == self.id,
                 foreign(Slice.datasource_type) == self.type,
@@ -66,7 +64,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
     @property
     def uid(self):
         """Unique id across datasource types"""
-        return f'{self.id}__{self.type}'
+        return f"{self.id}__{self.type}"
 
     @property
     def column_names(self):
@@ -78,7 +76,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
 
     @property
     def main_dttm_col(self):
-        return 'timestamp'
+        return "timestamp"
 
     @property
     def datasource_name(self):
@@ -108,22 +106,18 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
 
     @property
     def url(self):
-        return '/{}/edit/{}'.format(self.baselink, self.id)
+        return "/{}/edit/{}".format(self.baselink, self.id)
 
     @property
     def explore_url(self):
         if self.default_endpoint:
             return self.default_endpoint
         else:
-            return '/superset/explore/{obj.type}/{obj.id}/'.format(obj=self)
+            return "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
 
     @property
     def column_formats(self):
-        return {
-            m.metric_name: m.d3format
-            for m in self.metrics
-            if m.d3format
-        }
+        return {m.metric_name: m.d3format for m in self.metrics if m.d3format}
 
     def add_missing_metrics(self, metrics):
         exisiting_metrics = {m.metric_name for m in self.metrics}
@@ -136,22 +130,24 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
     def metrics_combo(self):
         return sorted(
             [
-                (m.metric_name, m.verbose_name or m.metric_name or '')
-                for m in self.metrics],
-            key=lambda x: x[1])
+                (m.metric_name, m.verbose_name or m.metric_name or "")
+                for m in self.metrics
+            ],
+            key=lambda x: x[1],
+        )
 
     @property
     def short_data(self):
         """Data representation of the datasource sent to the frontend"""
         return {
-            'edit_url': self.url,
-            'id': self.id,
-            'uid': self.uid,
-            'schema': self.schema,
-            'name': self.name,
-            'type': self.type,
-            'connection': self.connection,
-            'creator': str(self.created_by),
+            "edit_url": self.url,
+            "id": self.id,
+            "uid": self.uid,
+            "schema": self.schema,
+            "name": self.name,
+            "type": self.type,
+            "connection": self.connection,
+            "creator": str(self.created_by),
         }
 
     @property
@@ -163,69 +159,67 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
         """Data representation of the datasource sent to the frontend"""
         order_by_choices = []
         for s in sorted(self.column_names):
-            order_by_choices.append((json.dumps([s, True]), s + ' [asc]'))
-            order_by_choices.append((json.dumps([s, False]), s + ' [desc]'))
-
-        verbose_map = {'__timestamp': 'Time'}
-        verbose_map.update({
-            o.metric_name: o.verbose_name or o.metric_name
-            for o in self.metrics
-        })
-        verbose_map.update({
-            o.column_name: o.verbose_name or o.column_name
-            for o in self.columns
-        })
+            order_by_choices.append((json.dumps([s, True]), s + " [asc]"))
+            order_by_choices.append((json.dumps([s, False]), s + " [desc]"))
+
+        verbose_map = {"__timestamp": "Time"}
+        verbose_map.update(
+            {o.metric_name: o.verbose_name or o.metric_name for o in self.metrics}
+        )
+        verbose_map.update(
+            {o.column_name: o.verbose_name or o.column_name for o in self.columns}
+        )
         return {
             # simple fields
-            'id': self.id,
-            'column_formats': self.column_formats,
-            'description': self.description,
-            'database': self.database.data,  # pylint: disable=no-member
-            'default_endpoint': self.default_endpoint,
-            'filter_select': self.filter_select_enabled,  # TODO deprecate
-            'filter_select_enabled': self.filter_select_enabled,
-            'name': self.name,
-            'datasource_name': self.datasource_name,
-            'type': self.type,
-            'schema': self.schema,
-            'offset': self.offset,
-            'cache_timeout': self.cache_timeout,
-            'params': self.params,
-            'perm': self.perm,
-
+            "id": self.id,
+            "column_formats": self.column_formats,
+            "description": self.description,
+            "database": self.database.data,  # pylint: disable=no-member
+            "default_endpoint": self.default_endpoint,
+            "filter_select": self.filter_select_enabled,  # TODO deprecate
+            "filter_select_enabled": self.filter_select_enabled,
+            "name": self.name,
+            "datasource_name": self.datasource_name,
+            "type": self.type,
+            "schema": self.schema,
+            "offset": self.offset,
+            "cache_timeout": self.cache_timeout,
+            "params": self.params,
+            "perm": self.perm,
             # sqla-specific
-            'sql': self.sql,
-
+            "sql": self.sql,
             # computed fields
-            'all_cols': utils.choicify(self.column_names),
-            'columns': [o.data for o in self.columns],
-            'edit_url': self.url,
-            'filterable_cols': utils.choicify(self.filterable_column_names),
-            'gb_cols': utils.choicify(self.groupby_column_names),
-            'metrics': [o.data for o in self.metrics],
-            'metrics_combo': self.metrics_combo,
-            'order_by_choices': order_by_choices,
-            'owner': self.owner.id if self.owner else None,
-            'verbose_map': verbose_map,
-            'select_star': self.select_star,
+            "all_cols": utils.choicify(self.column_names),
+            "columns": [o.data for o in self.columns],
+            "edit_url": self.url,
+            "filterable_cols": utils.choicify(self.filterable_column_names),
+            "gb_cols": utils.choicify(self.groupby_column_names),
+            "metrics": [o.data for o in self.metrics],
+            "metrics_combo": self.metrics_combo,
+            "order_by_choices": order_by_choices,
+            "owner": self.owner.id if self.owner else None,
+            "verbose_map": verbose_map,
+            "select_star": self.select_star,
         }
 
     @staticmethod
     def filter_values_handler(
-            values, target_column_is_numeric=False, is_list_target=False):
+        values, target_column_is_numeric=False, is_list_target=False
+    ):
         def handle_single_value(v):
             # backward compatibility with previous <select> components
             if isinstance(v, basestring):
-                v = v.strip('\t\n \'"')
+                v = v.strip("\t\n '\"")
                 if target_column_is_numeric:
                     # For backwards compatibility and edge cases
                     # where a column data type might have changed
                     v = utils.string_to_num(v)
-                if v == '<NULL>':
+                if v == "<NULL>":
                     return None
-                elif v == '<empty string>':
-                    return ''
+                elif v == "<empty string>":
+                    return ""
             return v
+
         if isinstance(values, (list, tuple)):
             values = [handle_single_value(v) for v in values]
         else:
@@ -274,8 +268,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
             if col.column_name == column_name:
                 return col
 
-    def get_fk_many_from_list(
-            self, object_list, fkmany, fkmany_class, key_attr):
+    def get_fk_many_from_list(self, object_list, fkmany, fkmany_class, key_attr):
         """Update ORM one-to-many list from object list
 
         Used for syncing metrics and columns using the same code"""
@@ -298,13 +291,10 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
         for obj in object_list:
             key = obj.get(key_attr)
             if key not in orm_keys:
-                del obj['id']
+                del obj["id"]
                 orm_kwargs = {}
                 for k in obj:
-                    if (
-                        k in fkmany_class.update_from_object_fields and
-                        k in obj
-                    ):
+                    if k in fkmany_class.update_from_object_fields and k in obj:
                         orm_kwargs[k] = obj[k]
                 new_obj = fkmany_class(**orm_kwargs)
                 new_fks.append(new_obj)
@@ -325,16 +315,18 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
         for attr in self.update_from_object_fields:
             setattr(self, attr, obj.get(attr))
 
-        self.user_id = obj.get('owner')
+        self.user_id = obj.get("owner")
 
         # Syncing metrics
         metrics = self.get_fk_many_from_list(
-            obj.get('metrics'), self.metrics, self.metric_class, 'metric_name')
+            obj.get("metrics"), self.metrics, self.metric_class, "metric_name"
+        )
         self.metrics = metrics
 
         # Syncing columns
         self.columns = self.get_fk_many_from_list(
-            obj.get('columns'), self.columns, self.column_class, 'column_name')
+            obj.get("columns"), self.columns, self.column_class, "column_name"
+        )
 
 
 class BaseColumn(AuditMixinNullable, ImportMixin):
@@ -364,32 +356,30 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
         return self.column_name
 
     num_types = (
-        'DOUBLE', 'FLOAT', 'INT', 'BIGINT',
-        'LONG', 'REAL', 'NUMERIC', 'DECIMAL', 'MONEY',
+        "DOUBLE",
+        "FLOAT",
+        "INT",
+        "BIGINT",
+        "LONG",
+        "REAL",
+        "NUMERIC",
+        "DECIMAL",
+        "MONEY",
     )
-    date_types = ('DATE', 'TIME', 'DATETIME')
-    str_types = ('VARCHAR', 'STRING', 'CHAR')
+    date_types = ("DATE", "TIME", "DATETIME")
+    str_types = ("VARCHAR", "STRING", "CHAR")
 
     @property
     def is_num(self):
-        return (
-            self.type and
-            any([t in self.type.upper() for t in self.num_types])
-        )
+        return self.type and any([t in self.type.upper() for t in self.num_types])
 
     @property
     def is_time(self):
-        return (
-            self.type and
-            any([t in self.type.upper() for t in self.date_types])
-        )
+        return self.type and any([t in self.type.upper() for t in self.date_types])
 
     @property
     def is_string(self):
-        return (
-            self.type and
-            any([t in self.type.upper() for t in self.str_types])
-        )
+        return self.type and any([t in self.type.upper() for t in self.str_types])
 
     @property
     def expression(self):
@@ -398,9 +388,17 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
     @property
     def data(self):
         attrs = (
-            'id', 'column_name', 'verbose_name', 'description', 'expression',
-            'filterable', 'groupby', 'is_dttm', 'type',
-            'database_expression', 'python_date_format',
+            "id",
+            "column_name",
+            "verbose_name",
+            "description",
+            "expression",
+            "filterable",
+            "groupby",
+            "is_dttm",
+            "type",
+            "database_expression",
+            "python_date_format",
         )
         return {s: getattr(self, s) for s in attrs if hasattr(self, s)}
 
@@ -433,6 +431,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
         backref=backref('metrics', cascade='all, delete-orphan'),
         enable_typechecks=False)
     """
+
     @property
     def perm(self):
         raise NotImplementedError()
@@ -444,6 +443,12 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
     @property
     def data(self):
         attrs = (
-            'id', 'metric_name', 'verbose_name', 'description', 'expression',
-            'warning_text', 'd3format')
+            "id",
+            "metric_name",
+            "verbose_name",
+            "description",
+            "expression",
+            "warning_text",
+            "d3format",
+        )
         return {s: getattr(self, s) for s in attrs}
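
As a usage note for the filter_values_handler staticmethod earlier in this file, a small sketch of the normalization it applies to filter values coming from the frontend (behavior inferred from the diff above, not new code in the commit):

    # List values are stripped of quotes/whitespace, and the sentinel strings
    # "<NULL>" and "<empty string>" are mapped to None and "" respectively.
    BaseDatasource.filter_values_handler(
        [" 'foo' ", "<NULL>", "<empty string>"], is_list_target=True
    )
    # -> ["foo", None, ""]
    # With target_column_is_numeric=True, numeric-looking strings are coerced
    # via utils.string_to_num before the sentinel checks run.
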
diff --git a/superset/connectors/base/views.py b/superset/connectors/base/views.py
index a77177e..15acfe0 100644
--- a/superset/connectors/base/views.py
+++ b/superset/connectors/base/views.py
@@ -8,7 +8,10 @@ from superset.views.base import SupersetModelView
 class DatasourceModelView(SupersetModelView):
     def pre_delete(self, obj):
         if obj.slices:
-            raise SupersetException(Markup(
-                'Cannot delete a datasource that has slices attached to it.'
-                "Here's the list of associated charts: " +
-                ''.join([o.slice_link for o in obj.slices])))
+            raise SupersetException(
+                Markup(
+                    "Cannot delete a datasource that has slices attached to it."
+                    "Here's the list of associated charts: "
+                    + "".join([o.slice_link for o in obj.slices])
+                )
+            )
diff --git a/superset/connectors/connector_registry.py b/superset/connectors/connector_registry.py
index 258d2f5..626986a 100644
--- a/superset/connectors/connector_registry.py
+++ b/superset/connectors/connector_registry.py
@@ -35,15 +35,21 @@ class ConnectorRegistry(object):
         return datasources
 
     @classmethod
-    def get_datasource_by_name(cls, session, datasource_type, datasource_name,
-                               schema, database_name):
+    def get_datasource_by_name(
+        cls, session, datasource_type, datasource_name, schema, database_name
+    ):
         datasource_class = ConnectorRegistry.sources[datasource_type]
         datasources = session.query(datasource_class).all()
 
         # Filter datasources that don't have a database.
-        db_ds = [d for d in datasources if d.database and
-                 d.database.name == database_name and
-                 d.name == datasource_name and schema == schema]
+        db_ds = [
+            d
+            for d in datasources
+            if d.database
+            and d.database.name == database_name
+            and d.name == datasource_name
+            and schema == schema
+        ]
         return db_ds[0]
 
     @classmethod
@@ -71,8 +77,8 @@ class ConnectorRegistry(object):
         )
 
     @classmethod
-    def query_datasources_by_name(
-            cls, session, database, datasource_name, schema=None):
+    def query_datasources_by_name(cls, session, database, datasource_name, schema=None):
         datasource_class = ConnectorRegistry.sources[database.type]
         return datasource_class.query_datasources_by_name(
-            session, database, datasource_name, schema=None)
+            session, database, datasource_name, schema=None
+        )
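
For orientation, a hedged example of looking up a datasource through the registry; db.session comes from the superset package, "table" is assumed to be the type key registered for SqlaTable, and the datasource and database names are placeholders:

    from superset import db
    from superset.connectors.connector_registry import ConnectorRegistry

    datasource = ConnectorRegistry.get_datasource_by_name(
        session=db.session,
        datasource_type="table",        # assumption: SqlaTable registers as "table"
        datasource_name="birth_names",  # placeholder name
        schema=None,
        database_name="main",           # placeholder database
    )
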
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index 937c8d8..6da1b1f 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -21,28 +21,36 @@ from pydruid.utils.dimensions import MapLookupExtraction, RegexExtraction
 from pydruid.utils.filters import Dimension, Filter
 from pydruid.utils.having import Aggregation
 from pydruid.utils.postaggregator import (
-    Const, Field, HyperUniqueCardinality, Postaggregator, Quantile, Quantiles,
+    Const,
+    Field,
+    HyperUniqueCardinality,
+    Postaggregator,
+    Quantile,
+    Quantiles,
 )
 import requests
 import sqlalchemy as sa
 from sqlalchemy import (
-    Boolean, Column, DateTime, ForeignKey, Integer, String, Text, UniqueConstraint,
+    Boolean,
+    Column,
+    DateTime,
+    ForeignKey,
+    Integer,
+    String,
+    Text,
+    UniqueConstraint,
 )
 from sqlalchemy.orm import backref, relationship
 
 from superset import conf, db, security_manager
 from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
 from superset.exceptions import MetricPermException, SupersetException
-from superset.models.helpers import (
-    AuditMixinNullable, ImportMixin, QueryResult,
-)
+from superset.models.helpers import AuditMixinNullable, ImportMixin, QueryResult
 from superset.utils import core as utils, import_datasource
-from superset.utils.core import (
-    DimSelector, DTTM_ALIAS, flasher,
-)
+from superset.utils.core import DimSelector, DTTM_ALIAS, flasher
 
-DRUID_TZ = conf.get('DRUID_TZ')
-POST_AGG_TYPE = 'postagg'
+DRUID_TZ = conf.get("DRUID_TZ")
+POST_AGG_TYPE = "postagg"
 
 
 # Function wrapper because bound methods cannot
@@ -54,16 +62,17 @@ def _fetch_metadata_for(datasource):
 class JavascriptPostAggregator(Postaggregator):
     def __init__(self, name, field_names, function):
         self.post_aggregator = {
-            'type': 'javascript',
-            'fieldNames': field_names,
-            'name': name,
-            'function': function,
+            "type": "javascript",
+            "fieldNames": field_names,
+            "name": name,
+            "function": function,
         }
         self.name = name
 
 
 class CustomPostAggregator(Postaggregator):
     """A way to allow users to specify completely custom PostAggregators"""
+
     def __init__(self, name, post_aggregator):
         self.name = name
         self.post_aggregator = post_aggregator
@@ -73,8 +82,8 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
 
     """ORM object referencing the Druid clusters"""
 
-    __tablename__ = 'clusters'
-    type = 'druid'
+    __tablename__ = "clusters"
+    type = "druid"
 
     id = Column(Integer, primary_key=True)
     verbose_name = Column(String(250), unique=True)
@@ -82,14 +91,19 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
     cluster_name = Column(String(250), unique=True)
     broker_host = Column(String(255))
     broker_port = Column(Integer, default=8082)
-    broker_endpoint = Column(String(255), default='druid/v2')
+    broker_endpoint = Column(String(255), default="druid/v2")
     metadata_last_refreshed = Column(DateTime)
     cache_timeout = Column(Integer)
 
-    export_fields = ('cluster_name', 'broker_host', 'broker_port',
-                     'broker_endpoint', 'cache_timeout')
+    export_fields = (
+        "cluster_name",
+        "broker_host",
+        "broker_port",
+        "broker_endpoint",
+        "cache_timeout",
+    )
     update_from_object_fields = export_fields
-    export_children = ['datasources']
+    export_children = ["datasources"]
 
     def __repr__(self):
         return self.verbose_name if self.verbose_name else self.cluster_name
@@ -99,39 +113,33 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
 
     @property
     def data(self):
-        return {
-            'id': self.id,
-            'name': self.cluster_name,
-            'backend': 'druid',
-        }
+        return {"id": self.id, "name": self.cluster_name, "backend": "druid"}
 
     @staticmethod
     def get_base_url(host, port):
-        if not re.match('http(s)?://', host):
-            host = 'http://' + host
+        if not re.match("http(s)?://", host):
+            host = "http://" + host
 
-        url = '{0}:{1}'.format(host, port) if port else host
+        url = "{0}:{1}".format(host, port) if port else host
         return url
 
     def get_base_broker_url(self):
-        base_url = self.get_base_url(
-            self.broker_host, self.broker_port)
-        return f'{base_url}/{self.broker_endpoint}'
+        base_url = self.get_base_url(self.broker_host, self.broker_port)
+        return f"{base_url}/{self.broker_endpoint}"
 
     def get_pydruid_client(self):
         cli = PyDruid(
-            self.get_base_url(self.broker_host, self.broker_port),
-            self.broker_endpoint)
+            self.get_base_url(self.broker_host, self.broker_port), self.broker_endpoint
+        )
         return cli
 
     def get_datasources(self):
-        endpoint = self.get_base_broker_url() + '/datasources'
+        endpoint = self.get_base_broker_url() + "/datasources"
         return json.loads(requests.get(endpoint).text)
 
     def get_druid_version(self):
-        endpoint = self.get_base_url(
-            self.broker_host, self.broker_port) + '/status'
-        return json.loads(requests.get(endpoint).text)['version']
+        endpoint = self.get_base_url(self.broker_host, self.broker_port) + "/status"
+        return json.loads(requests.get(endpoint).text)["version"]
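
For clarity, a quick worked example of the URL helpers above; the host, port and endpoint values are placeholders:

    # With broker_host="localhost", broker_port=8082, broker_endpoint="druid/v2":
    DruidCluster.get_base_url("localhost", 8082)       # -> "http://localhost:8082"
    DruidCluster.get_base_url("https://broker", None)  # -> "https://broker"
    # get_base_broker_url() then yields "http://localhost:8082/druid/v2", and
    # get_datasources() issues a GET against "<broker url>/datasources".
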
 
     @property
     @utils.memoized
@@ -139,15 +147,13 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
         return self.get_druid_version()
 
     def refresh_datasources(
-            self,
-            datasource_name=None,
-            merge_flag=True,
-            refreshAll=True):
+        self, datasource_name=None, merge_flag=True, refreshAll=True
+    ):
         """Refresh metadata of all datasources in the cluster
         If ``datasource_name`` is specified, only that datasource is updated
         """
         ds_list = self.get_datasources()
-        blacklist = conf.get('DRUID_DATA_SOURCE_BLACKLIST', [])
+        blacklist = conf.get("DRUID_DATA_SOURCE_BLACKLIST", [])
         ds_refresh = []
         if not datasource_name:
             ds_refresh = list(filter(lambda ds: ds not in blacklist, ds_list))
@@ -175,12 +181,10 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
                 datasource = DruidDatasource(datasource_name=ds_name)
                 with session.no_autoflush:
                     session.add(datasource)
-                flasher(
-                    _('Adding new datasource [{}]').format(ds_name), 'success')
+                flasher(_("Adding new datasource [{}]").format(ds_name), "success")
                 ds_map[ds_name] = datasource
             elif refreshAll:
-                flasher(
-                    _('Refreshing datasource [{}]').format(ds_name), 'info')
+                flasher(_("Refreshing datasource [{}]").format(ds_name), "info")
             else:
                 del ds_map[ds_name]
                 continue
@@ -206,21 +210,21 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
                 )
                 col_objs = {col.column_name: col for col in col_objs_list}
                 for col in cols:
-                    if col == '__time':  # skip the time column
+                    if col == "__time":  # skip the time column
                         continue
                     col_obj = col_objs.get(col)
                     if not col_obj:
                         col_obj = DruidColumn(
-                            datasource_id=datasource.id,
-                            column_name=col)
+                            datasource_id=datasource.id, column_name=col
+                        )
                         with session.no_autoflush:
                             session.add(col_obj)
-                    col_obj.type = cols[col]['type']
+                    col_obj.type = cols[col]["type"]
                     col_obj.datasource = datasource
-                    if col_obj.type == 'STRING':
+                    if col_obj.type == "STRING":
                         col_obj.groupby = True
                         col_obj.filterable = True
-                    if col_obj.type == 'hyperUnique' or col_obj.type == 'thetaSketch':
+                    if col_obj.type == "hyperUnique" or col_obj.type == "thetaSketch":
                         col_obj.count_distinct = True
                     if col_obj.is_num:
                         col_obj.sum = True
@@ -231,7 +235,7 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
 
     @property
     def perm(self):
-        return '[{obj.cluster_name}].(id:{obj.id})'.format(obj=self)
+        return "[{obj.cluster_name}].(id:{obj.id})".format(obj=self)
 
     def get_perm(self):
         return self.perm
@@ -248,26 +252,36 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
 class DruidColumn(Model, BaseColumn):
     """ORM model for storing Druid datasource column metadata"""
 
-    __tablename__ = 'columns'
-    __table_args__ = (UniqueConstraint('column_name', 'datasource_id'),)
+    __tablename__ = "columns"
+    __table_args__ = (UniqueConstraint("column_name", "datasource_id"),)
 
-    datasource_id = Column(
-        Integer,
-        ForeignKey('datasources.id'))
+    datasource_id = Column(Integer, ForeignKey("datasources.id"))
     # Setting enable_typechecks=False disables polymorphic inheritance.
     datasource = relationship(
-        'DruidDatasource',
-        backref=backref('columns', cascade='all, delete-orphan'),
-        enable_typechecks=False)
+        "DruidDatasource",
+        backref=backref("columns", cascade="all, delete-orphan"),
+        enable_typechecks=False,
+    )
     dimension_spec_json = Column(Text)
 
     export_fields = (
-        'datasource_id', 'column_name', 'is_active', 'type', 'groupby',
-        'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
-        'description', 'dimension_spec_json', 'verbose_name',
+        "datasource_id",
+        "column_name",
+        "is_active",
+        "type",
+        "groupby",
+        "count_distinct",
+        "sum",
+        "avg",
+        "max",
+        "min",
+        "filterable",
+        "description",
+        "dimension_spec_json",
+        "verbose_name",
     )
     update_from_object_fields = export_fields
-    export_parent = 'datasource'
+    export_parent = "datasource"
 
     def __repr__(self):
         return self.column_name
@@ -283,82 +297,87 @@ class DruidColumn(Model, BaseColumn):
 
     def get_metrics(self):
         metrics = {}
-        metrics['count'] = DruidMetric(
-            metric_name='count',
-            verbose_name='COUNT(*)',
-            metric_type='count',
-            json=json.dumps({'type': 'count', 'name': 'count'}),
+        metrics["count"] = DruidMetric(
+            metric_name="count",
+            verbose_name="COUNT(*)",
+            metric_type="count",
+            json=json.dumps({"type": "count", "name": "count"}),
         )
         # Somehow we need to reassign this for UDAFs
-        if self.type in ('DOUBLE', 'FLOAT'):
-            corrected_type = 'DOUBLE'
+        if self.type in ("DOUBLE", "FLOAT"):
+            corrected_type = "DOUBLE"
         else:
             corrected_type = self.type
 
         if self.sum and self.is_num:
-            mt = corrected_type.lower() + 'Sum'
-            name = 'sum__' + self.column_name
+            mt = corrected_type.lower() + "Sum"
+            name = "sum__" + self.column_name
             metrics[name] = DruidMetric(
                 metric_name=name,
-                metric_type='sum',
-                verbose_name='SUM({})'.format(self.column_name),
-                json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name}),
+                metric_type="sum",
+                verbose_name="SUM({})".format(self.column_name),
+                json=json.dumps(
+                    {"type": mt, "name": name, "fieldName": self.column_name}
+                ),
             )
 
         if self.avg and self.is_num:
-            mt = corrected_type.lower() + 'Avg'
-            name = 'avg__' + self.column_name
+            mt = corrected_type.lower() + "Avg"
+            name = "avg__" + self.column_name
             metrics[name] = DruidMetric(
                 metric_name=name,
-                metric_type='avg',
-                verbose_name='AVG({})'.format(self.column_name),
-                json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name}),
+                metric_type="avg",
+                verbose_name="AVG({})".format(self.column_name),
+                json=json.dumps(
+                    {"type": mt, "name": name, "fieldName": self.column_name}
+                ),
             )
 
         if self.min and self.is_num:
-            mt = corrected_type.lower() + 'Min'
-            name = 'min__' + self.column_name
+            mt = corrected_type.lower() + "Min"
+            name = "min__" + self.column_name
             metrics[name] = DruidMetric(
                 metric_name=name,
-                metric_type='min',
-                verbose_name='MIN({})'.format(self.column_name),
-                json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name}),
+                metric_type="min",
+                verbose_name="MIN({})".format(self.column_name),
+                json=json.dumps(
+                    {"type": mt, "name": name, "fieldName": self.column_name}
+                ),
             )
         if self.max and self.is_num:
-            mt = corrected_type.lower() + 'Max'
-            name = 'max__' + self.column_name
+            mt = corrected_type.lower() + "Max"
+            name = "max__" + self.column_name
             metrics[name] = DruidMetric(
                 metric_name=name,
-                metric_type='max',
-                verbose_name='MAX({})'.format(self.column_name),
-                json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name}),
+                metric_type="max",
+                verbose_name="MAX({})".format(self.column_name),
+                json=json.dumps(
+                    {"type": mt, "name": name, "fieldName": self.column_name}
+                ),
             )
         if self.count_distinct:
-            name = 'count_distinct__' + self.column_name
-            if self.type == 'hyperUnique' or self.type == 'thetaSketch':
+            name = "count_distinct__" + self.column_name
+            if self.type == "hyperUnique" or self.type == "thetaSketch":
                 metrics[name] = DruidMetric(
                     metric_name=name,
-                    verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
+                    verbose_name="COUNT(DISTINCT {})".format(self.column_name),
                     metric_type=self.type,
-                    json=json.dumps({
-                        'type': self.type,
-                        'name': name,
-                        'fieldName': self.column_name,
-                    }),
+                    json=json.dumps(
+                        {"type": self.type, "name": name, "fieldName": self.column_name}
+                    ),
                 )
             else:
                 metrics[name] = DruidMetric(
                     metric_name=name,
-                    verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
-                    metric_type='count_distinct',
-                    json=json.dumps({
-                        'type': 'cardinality',
-                        'name': name,
-                        'fieldNames': [self.column_name]}),
+                    verbose_name="COUNT(DISTINCT {})".format(self.column_name),
+                    metric_type="count_distinct",
+                    json=json.dumps(
+                        {
+                            "type": "cardinality",
+                            "name": name,
+                            "fieldNames": [self.column_name],
+                        }
+                    ),
                 )
         return metrics
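
To make the generated metric naming concrete, a sketch of what get_metrics() yields for a numeric column; the column is hypothetical and the JSON payloads are paraphrased from the branches above:

    # For DruidColumn(column_name="duration", type="LONG", sum=True, max=True),
    # the returned dict maps names to DruidMetric objects whose json looks like:
    #   "count"         -> {"type": "count", "name": "count"}
    #   "sum__duration" -> {"type": "longSum", "name": "sum__duration", "fieldName": "duration"}
    #   "max__duration" -> {"type": "longMax", "name": "max__duration", "fieldName": "duration"}
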
 
@@ -374,7 +393,7 @@ class DruidColumn(Model, BaseColumn):
         for metric in metrics.values():
             dbmetric = dbmetrics.get(metric.metric_name)
             if dbmetric:
-                for attr in ['json', 'metric_type']:
+                for attr in ["json", "metric_type"]:
                     setattr(dbmetric, attr, getattr(metric, attr))
             else:
                 with db.session.no_autoflush:
@@ -384,9 +403,14 @@ class DruidColumn(Model, BaseColumn):
     @classmethod
     def import_obj(cls, i_column):
         def lookup_obj(lookup_column):
-            return db.session.query(DruidColumn).filter(
-                DruidColumn.datasource_id == lookup_column.datasource_id,
-                DruidColumn.column_name == lookup_column.column_name).first()
+            return (
+                db.session.query(DruidColumn)
+                .filter(
+                    DruidColumn.datasource_id == lookup_column.datasource_id,
+                    DruidColumn.column_name == lookup_column.column_name,
+                )
+                .first()
+            )
 
         return import_datasource.import_simple_obj(db.session, i_column, lookup_obj)
 
@@ -395,24 +419,30 @@ class DruidMetric(Model, BaseMetric):
 
     """ORM object referencing Druid metrics for a datasource"""
 
-    __tablename__ = 'metrics'
-    __table_args__ = (UniqueConstraint('metric_name', 'datasource_id'),)
-    datasource_id = Column(
-        Integer,
-        ForeignKey('datasources.id'))
+    __tablename__ = "metrics"
+    __table_args__ = (UniqueConstraint("metric_name", "datasource_id"),)
+    datasource_id = Column(Integer, ForeignKey("datasources.id"))
     # Setting enable_typechecks=False disables polymorphic inheritance.
     datasource = relationship(
-        'DruidDatasource',
-        backref=backref('metrics', cascade='all, delete-orphan'),
-        enable_typechecks=False)
+        "DruidDatasource",
+        backref=backref("metrics", cascade="all, delete-orphan"),
+        enable_typechecks=False,
+    )
     json = Column(Text)
 
     export_fields = (
-        'metric_name', 'verbose_name', 'metric_type', 'datasource_id',
-        'json', 'description', 'is_restricted', 'd3format', 'warning_text',
+        "metric_name",
+        "verbose_name",
+        "metric_type",
+        "datasource_id",
+        "json",
+        "description",
+        "is_restricted",
+        "d3format",
+        "warning_text",
     )
     update_from_object_fields = export_fields
-    export_parent = 'datasource'
+    export_parent = "datasource"
 
     @property
     def expression(self):
@@ -429,10 +459,12 @@ class DruidMetric(Model, BaseMetric):
     @property
     def perm(self):
         return (
-            '{parent_name}.[{obj.metric_name}](id:{obj.id})'
-        ).format(obj=self,
-                 parent_name=self.datasource.full_name,
-                 ) if self.datasource else None
+            ("{parent_name}.[{obj.metric_name}](id:{obj.id})").format(
+                obj=self, parent_name=self.datasource.full_name
+            )
+            if self.datasource
+            else None
+        )
 
     def get_perm(self):
         return self.perm
@@ -440,9 +472,15 @@ class DruidMetric(Model, BaseMetric):
     @classmethod
     def import_obj(cls, i_metric):
         def lookup_obj(lookup_metric):
-            return db.session.query(DruidMetric).filter(
-                DruidMetric.datasource_id == lookup_metric.datasource_id,
-                DruidMetric.metric_name == lookup_metric.metric_name).first()
+            return (
+                db.session.query(DruidMetric)
+                .filter(
+                    DruidMetric.datasource_id == lookup_metric.datasource_id,
+                    DruidMetric.metric_name == lookup_metric.metric_name,
+                )
+                .first()
+            )
+
         return import_datasource.import_simple_obj(db.session, i_metric, lookup_obj)
 
 
@@ -450,42 +488,49 @@ class DruidDatasource(Model, BaseDatasource):
 
     """ORM object referencing Druid datasources (tables)"""
 
-    __tablename__ = 'datasources'
-    __table_args__ = (UniqueConstraint('datasource_name', 'cluster_name'),)
+    __tablename__ = "datasources"
+    __table_args__ = (UniqueConstraint("datasource_name", "cluster_name"),)
 
-    type = 'druid'
-    query_language = 'json'
+    type = "druid"
+    query_language = "json"
     cluster_class = DruidCluster
     metric_class = DruidMetric
     column_class = DruidColumn
 
-    baselink = 'druiddatasourcemodelview'
+    baselink = "druiddatasourcemodelview"
 
     # Columns
     datasource_name = Column(String(255))
     is_hidden = Column(Boolean, default=False)
     filter_select_enabled = Column(Boolean, default=True)  # override default
     fetch_values_from = Column(String(100))
-    cluster_name = Column(
-        String(250), ForeignKey('clusters.cluster_name'))
+    cluster_name = Column(String(250), ForeignKey("clusters.cluster_name"))
     cluster = relationship(
-        'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
-    user_id = Column(Integer, ForeignKey('ab_user.id'))
+        "DruidCluster", backref="datasources", foreign_keys=[cluster_name]
+    )
+    user_id = Column(Integer, ForeignKey("ab_user.id"))
     owner = relationship(
         security_manager.user_model,
-        backref=backref('datasources', cascade='all, delete-orphan'),
-        foreign_keys=[user_id])
-    UniqueConstraint('cluster_name', 'datasource_name')
+        backref=backref("datasources", cascade="all, delete-orphan"),
+        foreign_keys=[user_id],
+    )
+    UniqueConstraint("cluster_name", "datasource_name")
 
     export_fields = (
-        'datasource_name', 'is_hidden', 'description', 'default_endpoint',
-        'cluster_name', 'offset', 'cache_timeout', 'params',
-        'filter_select_enabled',
+        "datasource_name",
+        "is_hidden",
+        "description",
+        "default_endpoint",
+        "cluster_name",
+        "offset",
+        "cache_timeout",
+        "params",
+        "filter_select_enabled",
     )
     update_from_object_fields = export_fields
 
-    export_parent = 'cluster'
-    export_children = ['columns', 'metrics']
+    export_parent = "cluster"
+    export_children = ["columns", "metrics"]
 
     @property
     def database(self):
@@ -505,8 +550,8 @@ class DruidDatasource(Model, BaseDatasource):
 
     @property
     def schema(self):
-        ds_name = self.datasource_name or ''
-        name_pieces = ds_name.split('.')
+        ds_name = self.datasource_name or ""
+        name_pieces = ds_name.split(".")
         if len(name_pieces) > 1:
             return name_pieces[0]
         else:
@@ -518,9 +563,9 @@ class DruidDatasource(Model, BaseDatasource):
         return security_manager.get_schema_perm(self.cluster, self.schema)
 
     def get_perm(self):
-        return (
-            '[{obj.cluster_name}].[{obj.datasource_name}]'
-            '(id:{obj.id})').format(obj=self)
+        return ("[{obj.cluster_name}].[{obj.datasource_name}]" "(id:{obj.id})").format(
+            obj=self
+        )
 
     def update_from_object(self, obj):
         return NotImplementedError()
@@ -532,35 +577,43 @@ class DruidDatasource(Model, BaseDatasource):
 
     @property
     def full_name(self):
-        return utils.get_datasource_full_name(
-            self.cluster_name, self.datasource_name)
+        return utils.get_datasource_full_name(self.cluster_name, self.datasource_name)
 
     @property
     def time_column_grains(self):
         return {
-            'time_columns': [
-                'all', '5 seconds', '30 seconds', '1 minute', '5 minutes',
-                '30 minutes', '1 hour', '6 hour', '1 day', '7 days',
-                'week', 'week_starting_sunday', 'week_ending_saturday',
-                'month', 'quarter', 'year',
+            "time_columns": [
+                "all",
+                "5 seconds",
+                "30 seconds",
+                "1 minute",
+                "5 minutes",
+                "30 minutes",
+                "1 hour",
+                "6 hour",
+                "1 day",
+                "7 days",
+                "week",
+                "week_starting_sunday",
+                "week_ending_saturday",
+                "month",
+                "quarter",
+                "year",
             ],
-            'time_grains': ['now'],
+            "time_grains": ["now"],
         }
 
     def __repr__(self):
         return self.datasource_name
 
-    @renders('datasource_name')
+    @renders("datasource_name")
     def datasource_link(self):
-        url = f'/superset/explore/{self.type}/{self.id}/'
+        url = f"/superset/explore/{self.type}/{self.id}/"
         name = escape(self.datasource_name)
         return Markup(f'<a href="{url}">{name}</a>')
 
     def get_metric_obj(self, metric_name):
-        return [
-            m.json_obj for m in self.metrics
-            if m.metric_name == metric_name
-        ][0]
+        return [m.json_obj for m in self.metrics if m.metric_name == metric_name][0]
 
     @classmethod
     def import_obj(cls, i_datasource, import_time=None):
@@ -570,29 +623,38 @@ class DruidDatasource(Model, BaseDatasource):
          This function can be used to import/export dashboards between multiple
          Superset instances. Audit metadata isn't copied over.
         """
+
         def lookup_datasource(d):
-            return db.session.query(DruidDatasource).filter(
-                DruidDatasource.datasource_name == d.datasource_name,
-                DruidCluster.cluster_name == d.cluster_name,
-            ).first()
+            return (
+                db.session.query(DruidDatasource)
+                .filter(
+                    DruidDatasource.datasource_name == d.datasource_name,
+                    DruidCluster.cluster_name == d.cluster_name,
+                )
+                .first()
+            )
 
         def lookup_cluster(d):
-            return db.session.query(DruidCluster).filter_by(
-                cluster_name=d.cluster_name).one()
+            return (
+                db.session.query(DruidCluster)
+                .filter_by(cluster_name=d.cluster_name)
+                .one()
+            )
+
         return import_datasource.import_datasource(
-            db.session, i_datasource, lookup_cluster, lookup_datasource,
-            import_time)
+            db.session, i_datasource, lookup_cluster, lookup_datasource, import_time
+        )
 
     def latest_metadata(self):
         """Returns segment metadata from the latest segment"""
-        logging.info('Syncing datasource [{}]'.format(self.datasource_name))
+        logging.info("Syncing datasource [{}]".format(self.datasource_name))
         client = self.cluster.get_pydruid_client()
         try:
             results = client.time_boundary(datasource=self.datasource_name)
         except IOError:
             results = None
         if results:
-            max_time = results[0]['result']['maxTime']
+            max_time = results[0]["result"]["maxTime"]
             max_time = dparse(max_time)
         else:
             max_time = datetime.now()
@@ -601,7 +663,7 @@ class DruidDatasource(Model, BaseDatasource):
         # realtime segments, which triggered a bug (fixed in druid 0.8.2).
         # https://groups.google.com/forum/#!topic/druid-user/gVCqqspHqOQ
         lbound = (max_time - timedelta(days=7)).isoformat()
-        if LooseVersion(self.cluster.druid_version) < LooseVersion('0.8.2'):
+        if LooseVersion(self.cluster.druid_version) < LooseVersion("0.8.2"):
             rbound = (max_time - timedelta(1)).isoformat()
         else:
             rbound = max_time.isoformat()
@@ -609,53 +671,48 @@ class DruidDatasource(Model, BaseDatasource):
         try:
             segment_metadata = client.segment_metadata(
                 datasource=self.datasource_name,
-                intervals=lbound + '/' + rbound,
+                intervals=lbound + "/" + rbound,
                 merge=self.merge_flag,
-                analysisTypes=[])
+                analysisTypes=[],
+            )
         except Exception as e:
-            logging.warning('Failed first attempt to get latest segment')
+            logging.warning("Failed first attempt to get latest segment")
             logging.exception(e)
         if not segment_metadata:
             # if no segments in the past 7 days, look at all segments
             lbound = datetime(1901, 1, 1).isoformat()[:10]
-            if LooseVersion(self.cluster.druid_version) < LooseVersion('0.8.2'):
+            if LooseVersion(self.cluster.druid_version) < LooseVersion("0.8.2"):
                 rbound = datetime.now().isoformat()
             else:
                 rbound = datetime(2050, 1, 1).isoformat()[:10]
             try:
                 segment_metadata = client.segment_metadata(
                     datasource=self.datasource_name,
-                    intervals=lbound + '/' + rbound,
+                    intervals=lbound + "/" + rbound,
                     merge=self.merge_flag,
-                    analysisTypes=[])
+                    analysisTypes=[],
+                )
             except Exception as e:
-                logging.warning('Failed 2nd attempt to get latest segment')
+                logging.warning("Failed 2nd attempt to get latest segment")
                 logging.exception(e)
         if segment_metadata:
-            return segment_metadata[-1]['columns']
+            return segment_metadata[-1]["columns"]
 
     def refresh_metrics(self):
         for col in self.columns:
             col.refresh_metrics()
 
     @classmethod
-    def sync_to_db_from_config(
-            cls,
-            druid_config,
-            user,
-            cluster,
-            refresh=True):
+    def sync_to_db_from_config(cls, druid_config, user, cluster, refresh=True):
         """Merges the ds config from druid_config into one stored in the db."""
         session = db.session
         datasource = (
-            session.query(cls)
-            .filter_by(datasource_name=druid_config['name'])
-            .first()
+            session.query(cls).filter_by(datasource_name=druid_config["name"]).first()
         )
         # Create a new datasource.
         if not datasource:
             datasource = cls(
-                datasource_name=druid_config['name'],
+                datasource_name=druid_config["name"],
                 cluster=cluster,
                 owner=user,
                 changed_by_fk=user.id,
@@ -665,7 +722,7 @@ class DruidDatasource(Model, BaseDatasource):
         elif not refresh:
             return
 
-        dimensions = druid_config['dimensions']
+        dimensions = druid_config["dimensions"]
         col_objs = (
             session.query(DruidColumn)
             .filter(DruidColumn.datasource_id == datasource.id)
@@ -681,7 +738,7 @@ class DruidDatasource(Model, BaseDatasource):
                     groupby=True,
                     filterable=True,
                     # TODO: fetch type from Hive.
-                    type='STRING',
+                    type="STRING",
                     datasource=datasource,
                 )
                 session.add(col_obj)
@@ -689,42 +746,43 @@ class DruidDatasource(Model, BaseDatasource):
         metric_objs = (
             session.query(DruidMetric)
             .filter(DruidMetric.datasource_id == datasource.id)
-            .filter(DruidMetric.metric_name.in_(
-                spec['name'] for spec in druid_config['metrics_spec']
-            ))
+            .filter(
+                DruidMetric.metric_name.in_(
+                    spec["name"] for spec in druid_config["metrics_spec"]
+                )
+            )
         )
         metric_objs = {metric.metric_name: metric for metric in metric_objs}
-        for metric_spec in druid_config['metrics_spec']:
-            metric_name = metric_spec['name']
-            metric_type = metric_spec['type']
+        for metric_spec in druid_config["metrics_spec"]:
+            metric_name = metric_spec["name"]
+            metric_type = metric_spec["type"]
             metric_json = json.dumps(metric_spec)
 
-            if metric_type == 'count':
-                metric_type = 'longSum'
-                metric_json = json.dumps({
-                    'type': 'longSum',
-                    'name': metric_name,
-                    'fieldName': metric_name,
-                })
+            if metric_type == "count":
+                metric_type = "longSum"
+                metric_json = json.dumps(
+                    {"type": "longSum", "name": metric_name, "fieldName": metric_name}
+                )
 
             metric_obj = metric_objs.get(metric_name, None)
             if not metric_obj:
                 metric_obj = DruidMetric(
                     metric_name=metric_name,
                     metric_type=metric_type,
-                    verbose_name='%s(%s)' % (metric_type, metric_name),
+                    verbose_name="%s(%s)" % (metric_type, metric_name),
                     datasource=datasource,
                     json=metric_json,
                     description=(
-                        'Imported from the airolap config dir for %s' %
-                        druid_config['name']),
+                        "Imported from the airolap config dir for %s"
+                        % druid_config["name"]
+                    ),
                 )
                 session.add(metric_obj)
         session.commit()
 
     @staticmethod
     def time_offset(granularity):
-        if granularity == 'week_ending_saturday':
+        if granularity == "week_ending_saturday":
             return 6 * 24 * 3600 * 1000  # 6 days
         return 0
 
@@ -733,50 +791,51 @@ class DruidDatasource(Model, BaseDatasource):
     # TODO: pass origin from the UI
     @staticmethod
     def granularity(period_name, timezone=None, origin=None):
-        if not period_name or period_name == 'all':
-            return 'all'
+        if not period_name or period_name == "all":
+            return "all"
         iso_8601_dict = {
-            '5 seconds': 'PT5S',
-            '30 seconds': 'PT30S',
-            '1 minute': 'PT1M',
-            '5 minutes': 'PT5M',
-            '30 minutes': 'PT30M',
-            '1 hour': 'PT1H',
-            '6 hour': 'PT6H',
-            'one day': 'P1D',
-            '1 day': 'P1D',
-            '7 days': 'P7D',
-            'week': 'P1W',
-            'week_starting_sunday': 'P1W',
-            'week_ending_saturday': 'P1W',
-            'month': 'P1M',
-            'quarter': 'P3M',
-            'year': 'P1Y',
+            "5 seconds": "PT5S",
+            "30 seconds": "PT30S",
+            "1 minute": "PT1M",
+            "5 minutes": "PT5M",
+            "30 minutes": "PT30M",
+            "1 hour": "PT1H",
+            "6 hour": "PT6H",
+            "one day": "P1D",
+            "1 day": "P1D",
+            "7 days": "P7D",
+            "week": "P1W",
+            "week_starting_sunday": "P1W",
+            "week_ending_saturday": "P1W",
+            "month": "P1M",
+            "quarter": "P3M",
+            "year": "P1Y",
         }
 
-        granularity = {'type': 'period'}
+        granularity = {"type": "period"}
         if timezone:
-            granularity['timeZone'] = timezone
+            granularity["timeZone"] = timezone
 
         if origin:
             dttm = utils.parse_human_datetime(origin)
-            granularity['origin'] = dttm.isoformat()
+            granularity["origin"] = dttm.isoformat()
 
         if period_name in iso_8601_dict:
-            granularity['period'] = iso_8601_dict[period_name]
-            if period_name in ('week_ending_saturday', 'week_starting_sunday'):
+            granularity["period"] = iso_8601_dict[period_name]
+            if period_name in ("week_ending_saturday", "week_starting_sunday"):
                 # use Sunday as start of the week
-                granularity['origin'] = '2016-01-03T00:00:00'
+                granularity["origin"] = "2016-01-03T00:00:00"
         elif not isinstance(period_name, str):
-            granularity['type'] = 'duration'
-            granularity['duration'] = period_name
-        elif period_name.startswith('P'):
+            granularity["type"] = "duration"
+            granularity["duration"] = period_name
+        elif period_name.startswith("P"):
             # identify if the string is the iso_8601 period
-            granularity['period'] = period_name
+            granularity["period"] = period_name
         else:
-            granularity['type'] = 'duration'
-            granularity['duration'] = utils.parse_human_timedelta(
-                period_name).total_seconds() * 1000
+            granularity["type"] = "duration"
+            granularity["duration"] = (
+                utils.parse_human_timedelta(period_name).total_seconds() * 1000
+            )
         return granularity
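
For reference, the granularity helper above maps the UI period names onto Druid
granularity objects. A minimal sketch of what it returns (period strings read off
the iso_8601_dict mapping above; assumes an environment where the Superset druid
models module imports cleanly):

    from superset.connectors.druid.models import DruidDatasource

    DruidDatasource.granularity("30 minutes", timezone="UTC")
    # -> {"type": "period", "timeZone": "UTC", "period": "PT30M"}

    DruidDatasource.granularity("week_ending_saturday")
    # -> {"type": "period", "period": "P1W", "origin": "2016-01-03T00:00:00"}

    DruidDatasource.granularity(300000)  # non-string values are treated as a duration in ms
    # -> {"type": "duration", "duration": 300000}
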
 
     @staticmethod
@@ -785,47 +844,35 @@ class DruidDatasource(Model, BaseDatasource):
         For a metric specified as `postagg` returns the
         kind of post aggregation for pydruid.
         """
-        if mconf.get('type') == 'javascript':
+        if mconf.get("type") == "javascript":
             return JavascriptPostAggregator(
-                name=mconf.get('name', ''),
-                field_names=mconf.get('fieldNames', []),
-                function=mconf.get('function', ''))
-        elif mconf.get('type') == 'quantile':
-            return Quantile(
-                mconf.get('name', ''),
-                mconf.get('probability', ''),
-            )
-        elif mconf.get('type') == 'quantiles':
-            return Quantiles(
-                mconf.get('name', ''),
-                mconf.get('probabilities', ''),
-            )
-        elif mconf.get('type') == 'fieldAccess':
-            return Field(mconf.get('name'))
-        elif mconf.get('type') == 'constant':
-            return Const(
-                mconf.get('value'),
-                output_name=mconf.get('name', ''),
+                name=mconf.get("name", ""),
+                field_names=mconf.get("fieldNames", []),
+                function=mconf.get("function", ""),
             )
-        elif mconf.get('type') == 'hyperUniqueCardinality':
-            return HyperUniqueCardinality(
-                mconf.get('name'),
-            )
-        elif mconf.get('type') == 'arithmetic':
+        elif mconf.get("type") == "quantile":
+            return Quantile(mconf.get("name", ""), mconf.get("probability", ""))
+        elif mconf.get("type") == "quantiles":
+            return Quantiles(mconf.get("name", ""), mconf.get("probabilities", ""))
+        elif mconf.get("type") == "fieldAccess":
+            return Field(mconf.get("name"))
+        elif mconf.get("type") == "constant":
+            return Const(mconf.get("value"), output_name=mconf.get("name", ""))
+        elif mconf.get("type") == "hyperUniqueCardinality":
+            return HyperUniqueCardinality(mconf.get("name"))
+        elif mconf.get("type") == "arithmetic":
             return Postaggregator(
-                mconf.get('fn', '/'),
-                mconf.get('fields', []),
-                mconf.get('name', ''))
+                mconf.get("fn", "/"), mconf.get("fields", []), mconf.get("name", "")
+            )
         else:
-            return CustomPostAggregator(
-                mconf.get('name', ''),
-                mconf)
+            return CustomPostAggregator(mconf.get("name", ""), mconf)
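
The dispatch above turns a metric's JSON definition into the matching pydruid
post-aggregator. A small sketch using a hypothetical arithmetic metric config
(the field and metric names are invented for illustration):

    from superset.connectors.druid.models import DruidDatasource

    mconf = {
        "type": "arithmetic",
        "name": "pct_success",
        "fn": "/",
        "fields": [
            {"type": "fieldAccess", "fieldName": "num_success"},
            {"type": "fieldAccess", "fieldName": "count"},
        ],
    }
    post_agg = DruidDatasource.get_post_agg(mconf)
    # -> Postaggregator("/", mconf["fields"], "pct_success"), per the
    #    "arithmetic" branch above; unknown types fall through to
    #    CustomPostAggregator(name, mconf).
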
 
     @staticmethod
     def find_postaggs_for(postagg_names, metrics_dict):
         """Return a list of metrics that are post aggregations"""
         postagg_metrics = [
-            metrics_dict[name] for name in postagg_names
+            metrics_dict[name]
+            for name in postagg_names
             if metrics_dict[name].metric_type == POST_AGG_TYPE
         ]
         # Remove post aggregations that were found
@@ -835,13 +882,12 @@ class DruidDatasource(Model, BaseDatasource):
 
     @staticmethod
     def recursive_get_fields(_conf):
-        _type = _conf.get('type')
-        _field = _conf.get('field')
-        _fields = _conf.get('fields')
+        _type = _conf.get("type")
+        _field = _conf.get("field")
+        _fields = _conf.get("fields")
         field_names = []
-        if _type in ['fieldAccess', 'hyperUniqueCardinality',
-                     'quantile', 'quantiles']:
-            field_names.append(_conf.get('fieldName', ''))
+        if _type in ["fieldAccess", "hyperUniqueCardinality", "quantile", "quantiles"]:
+            field_names.append(_conf.get("fieldName", ""))
         if _field:
             field_names += DruidDatasource.recursive_get_fields(_field)
         if _fields:
@@ -853,18 +899,22 @@ class DruidDatasource(Model, BaseDatasource):
     def resolve_postagg(postagg, post_aggs, agg_names, visited_postaggs, metrics_dict):
         mconf = postagg.json_obj
         required_fields = set(
-            DruidDatasource.recursive_get_fields(mconf) +
-            mconf.get('fieldNames', []))
+            DruidDatasource.recursive_get_fields(mconf) + mconf.get("fieldNames", [])
+        )
         # Check if the fields are already in aggs
         # or is a previous postagg
-        required_fields = set([
-            field for field in required_fields
-            if field not in visited_postaggs and field not in agg_names
-        ])
+        required_fields = set(
+            [
+                field
+                for field in required_fields
+                if field not in visited_postaggs and field not in agg_names
+            ]
+        )
         # First try to find postaggs that match
         if len(required_fields) > 0:
             missing_postaggs = DruidDatasource.find_postaggs_for(
-                required_fields, metrics_dict)
+                required_fields, metrics_dict
+            )
             for missing_metric in required_fields:
                 agg_names.add(missing_metric)
             for missing_postagg in missing_postaggs:
@@ -873,7 +923,12 @@ class DruidDatasource(Model, BaseDatasource):
                 visited_postaggs.add(missing_postagg.metric_name)
             for missing_postagg in missing_postaggs:
                 DruidDatasource.resolve_postagg(
-                    missing_postagg, post_aggs, agg_names, visited_postaggs, metrics_dict)
+                    missing_postagg,
+                    post_aggs,
+                    agg_names,
+                    visited_postaggs,
+                    metrics_dict,
+                )
         post_aggs[postagg.metric_name] = DruidDatasource.get_post_agg(postagg.json_obj)
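
resolve_postagg above relies on recursive_get_fields to work out which plain
aggregations a post-aggregation depends on before it is registered. A sketch with
a hypothetical nested config (same shape as the arithmetic example earlier):

    from superset.connectors.druid.models import DruidDatasource

    conf = {
        "type": "arithmetic",
        "fn": "/",
        "fields": [
            {"type": "fieldAccess", "fieldName": "num_success"},
            {"type": "fieldAccess", "fieldName": "count"},
        ],
    }
    DruidDatasource.recursive_get_fields(conf)
    # -> ["num_success", "count"]
    set(DruidDatasource.recursive_get_fields(conf) + conf.get("fieldNames", []))
    # -> {"num_success", "count"}, i.e. the required_fields computed above
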
 
     @staticmethod
@@ -898,21 +953,18 @@ class DruidDatasource(Model, BaseDatasource):
             postagg = metrics_dict[postagg_name]
             visited_postaggs.add(postagg_name)
             DruidDatasource.resolve_postagg(
-                postagg, post_aggs, saved_agg_names, visited_postaggs, metrics_dict)
+                postagg, post_aggs, saved_agg_names, visited_postaggs, metrics_dict
+            )
         aggs = DruidDatasource.get_aggregations(
-            metrics_dict,
-            saved_agg_names,
-            adhoc_agg_configs,
+            metrics_dict, saved_agg_names, adhoc_agg_configs
         )
         return aggs, post_aggs
 
-    def values_for_column(self,
-                          column_name,
-                          limit=10000):
+    def values_for_column(self, column_name, limit=10000):
         """Retrieve some values for the given column"""
         logging.info(
-            'Getting values for columns [{}] limited to [{}]'
-            .format(column_name, limit))
+            "Getting values for columns [{}] limited to [{}]".format(column_name, limit)
+        )
         # TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
         if self.fetch_values_from:
             from_dttm = utils.parse_human_datetime(self.fetch_values_from)
@@ -921,11 +973,11 @@ class DruidDatasource(Model, BaseDatasource):
 
         qry = dict(
             datasource=self.datasource_name,
-            granularity='all',
-            intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
-            aggregations=dict(count=count('count')),
+            granularity="all",
+            intervals=from_dttm.isoformat() + "/" + datetime.now().isoformat(),
+            aggregations=dict(count=count("count")),
             dimension=column_name,
-            metric='count',
+            metric="count",
             threshold=limit,
         )
 
@@ -947,16 +999,18 @@ class DruidDatasource(Model, BaseDatasource):
                     f = None
                     # Check if this dimension uses an extraction function
                     # If so, create the appropriate pydruid extraction object
-                    if isinstance(dim, dict) and 'extractionFn' in dim:
-                        (col, extraction_fn) = DruidDatasource._create_extraction_fn(dim)
-                        dim_val = dim['outputName']
+                    if isinstance(dim, dict) and "extractionFn" in dim:
+                        (col, extraction_fn) = DruidDatasource._create_extraction_fn(
+                            dim
+                        )
+                        dim_val = dim["outputName"]
                         f = Filter(
                             dimension=col,
                             value=row[dim_val],
                             extraction_function=extraction_fn,
                         )
                     elif isinstance(dim, dict):
-                        dim_val = dim['outputName']
+                        dim_val = dim["outputName"]
                         if dim_val:
                             f = Dimension(dim_val) == row[dim_val]
                     else:
@@ -964,27 +1018,27 @@ class DruidDatasource(Model, BaseDatasource):
                     if f:
                         fields.append(f)
                 if len(fields) > 1:
-                    term = Filter(type='and', fields=fields)
+                    term = Filter(type="and", fields=fields)
                     new_filters.append(term)
                 elif fields:
                     new_filters.append(fields[0])
             if new_filters:
-                ff = Filter(type='or', fields=new_filters)
+                ff = Filter(type="or", fields=new_filters)
                 if not dim_filter:
                     ret = ff
                 else:
-                    ret = Filter(type='and', fields=[ff, dim_filter])
+                    ret = Filter(type="and", fields=[ff, dim_filter])
         return ret
 
     @staticmethod
     def druid_type_from_adhoc_metric(adhoc_metric):
-        column_type = adhoc_metric['column']['type'].lower()
-        aggregate = adhoc_metric['aggregate'].lower()
+        column_type = adhoc_metric["column"]["type"].lower()
+        aggregate = adhoc_metric["aggregate"].lower()
 
-        if aggregate == 'count':
-            return 'count'
-        if aggregate == 'count_distinct':
-            return 'cardinality'
+        if aggregate == "count":
+            return "count"
+        if aggregate == "count_distinct":
+            return "cardinality"
         else:
             return column_type + aggregate.capitalize()
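
The helper above derives the Druid aggregator type from the column type and the
aggregate chosen in the UI. Two sketched inputs/outputs (the adhoc metric dicts
are hypothetical):

    from superset.connectors.druid.models import DruidDatasource

    adhoc = {"column": {"type": "DOUBLE", "column_name": "amount"},
             "aggregate": "SUM", "label": "sum_amount"}
    DruidDatasource.druid_type_from_adhoc_metric(adhoc)  # -> "doubleSum"

    adhoc = {"column": {"type": "STRING", "column_name": "user_id"},
             "aggregate": "COUNT_DISTINCT", "label": "unique_users"}
    DruidDatasource.druid_type_from_adhoc_metric(adhoc)  # -> "cardinality"
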
 
@@ -1011,26 +1065,28 @@ class DruidDatasource(Model, BaseDatasource):
                 invalid_metric_names.append(metric_name)
         if len(invalid_metric_names) > 0:
             raise SupersetException(
-                _('Metric(s) {} must be aggregations.').format(invalid_metric_names))
+                _("Metric(s) {} must be aggregations.").format(invalid_metric_names)
+            )
         for adhoc_metric in adhoc_metrics:
-            aggregations[adhoc_metric['label']] = {
-                'fieldName': adhoc_metric['column']['column_name'],
-                'fieldNames': [adhoc_metric['column']['column_name']],
-                'type': DruidDatasource.druid_type_from_adhoc_metric(adhoc_metric),
-                'name': adhoc_metric['label'],
+            aggregations[adhoc_metric["label"]] = {
+                "fieldName": adhoc_metric["column"]["column_name"],
+                "fieldNames": [adhoc_metric["column"]["column_name"]],
+                "type": DruidDatasource.druid_type_from_adhoc_metric(adhoc_metric),
+                "name": adhoc_metric["label"],
             }
         return aggregations
 
     def check_restricted_metrics(self, aggregations):
         rejected_metrics = [
-            m.metric_name for m in self.metrics
-            if m.is_restricted and
-            m.metric_name in aggregations.keys() and
-            not security_manager.has_access('metric_access', m.perm)
+            m.metric_name
+            for m in self.metrics
+            if m.is_restricted
+            and m.metric_name in aggregations.keys()
+            and not security_manager.has_access("metric_access", m.perm)
         ]
         if rejected_metrics:
             raise MetricPermException(
-                'Access to the metrics denied: ' + ', '.join(rejected_metrics),
+                "Access to the metrics denied: " + ", ".join(rejected_metrics)
             )
 
     def get_dimensions(self, groupby, columns_dict):
@@ -1053,9 +1109,9 @@ class DruidDatasource(Model, BaseDatasource):
         # add tzinfo to native datetime with config
         from_dttm = from_dttm.replace(tzinfo=DRUID_TZ)
         to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
-        return '{}/{}'.format(
-            from_dttm.isoformat() if from_dttm else '',
-            to_dttm.isoformat() if to_dttm else '',
+        return "{}/{}".format(
+            from_dttm.isoformat() if from_dttm else "",
+            to_dttm.isoformat() if to_dttm else "",
         )
 
     @staticmethod
@@ -1067,10 +1123,10 @@ class DruidDatasource(Model, BaseDatasource):
         values = []
         for dimension in dimensions:
             if isinstance(dimension, dict):
-                if 'extractionFn' in dimension:
+                if "extractionFn" in dimension:
                     values.append(dimension)
-                elif 'dimension' in dimension:
-                    values.append(dimension['dimension'])
+                elif "dimension" in dimension:
+                    values.append(dimension["dimension"])
             else:
                 values.append(dimension)
 
@@ -1083,60 +1139,64 @@ class DruidDatasource(Model, BaseDatasource):
         :param dict metric: The metric to sanitize
         """
         if (
-            utils.is_adhoc_metric(metric) and
-            metric['column']['type'].upper() == 'FLOAT'
+            utils.is_adhoc_metric(metric)
+            and metric["column"]["type"].upper() == "FLOAT"
         ):
-            metric['column']['type'] = 'DOUBLE'
+            metric["column"]["type"] = "DOUBLE"
 
     def run_query(  # noqa / druid
-            self,
-            groupby, metrics,
-            granularity,
-            from_dttm, to_dttm,
-            filter=None,  # noqa
-            is_timeseries=True,
-            timeseries_limit=None,
-            timeseries_limit_metric=None,
-            row_limit=None,
-            inner_from_dttm=None, inner_to_dttm=None,
-            orderby=None,
-            extras=None,  # noqa
-            columns=None, phase=2, client=None,
-            order_desc=True,
-            prequeries=None,
-            is_prequery=False,
-        ):
+        self,
+        groupby,
+        metrics,
+        granularity,
+        from_dttm,
+        to_dttm,
+        filter=None,  # noqa
+        is_timeseries=True,
+        timeseries_limit=None,
+        timeseries_limit_metric=None,
+        row_limit=None,
+        inner_from_dttm=None,
+        inner_to_dttm=None,
+        orderby=None,
+        extras=None,  # noqa
+        columns=None,
+        phase=2,
+        client=None,
+        order_desc=True,
+        prequeries=None,
+        is_prequery=False,
+    ):
         """Runs a query against Druid and returns a dataframe.
         """
         # TODO refactor into using a TBD Query object
         client = client or self.cluster.get_pydruid_client()
-        row_limit = row_limit or conf.get('ROW_LIMIT')
+        row_limit = row_limit or conf.get("ROW_LIMIT")
 
         if not is_timeseries:
-            granularity = 'all'
+            granularity = "all"
 
-        if granularity == 'all':
+        if granularity == "all":
             phase = 1
         inner_from_dttm = inner_from_dttm or from_dttm
         inner_to_dttm = inner_to_dttm or to_dttm
 
         timezone = from_dttm.replace(tzinfo=DRUID_TZ).tzname() if from_dttm else None
 
-        query_str = ''
+        query_str = ""
         metrics_dict = {m.metric_name: m for m in self.metrics}
         columns_dict = {c.column_name: c for c in self.columns}
 
-        if (
-            self.cluster and
-            LooseVersion(self.cluster.get_druid_version()) < LooseVersion('0.11.0')
-        ):
+        if self.cluster and LooseVersion(
+            self.cluster.get_druid_version()
+        ) < LooseVersion("0.11.0"):
             for metric in metrics:
                 self.sanitize_metric_object(metric)
             self.sanitize_metric_object(timeseries_limit_metric)
 
         aggregations, post_aggs = DruidDatasource.metrics_and_post_aggs(
-            metrics,
-            metrics_dict)
+            metrics, metrics_dict
+        )
 
         self.check_restricted_metrics(aggregations)
 
@@ -1148,9 +1208,7 @@ class DruidDatasource(Model, BaseDatasource):
             dimensions=dimensions,
             aggregations=aggregations,
             granularity=DruidDatasource.granularity(
-                granularity,
-                timezone=timezone,
-                origin=extras.get('druid_time_origin'),
+                granularity, timezone=timezone, origin=extras.get("druid_time_origin")
             ),
             post_aggregations=post_aggs,
             intervals=self.intervals_from_dttms(from_dttm, to_dttm),
@@ -1158,97 +1216,91 @@ class DruidDatasource(Model, BaseDatasource):
 
         filters = DruidDatasource.get_filters(filter, self.num_cols, columns_dict)
         if filters:
-            qry['filter'] = filters
+            qry["filter"] = filters
 
-        having_filters = self.get_having_filters(extras.get('having_druid'))
+        having_filters = self.get_having_filters(extras.get("having_druid"))
         if having_filters:
-            qry['having'] = having_filters
+            qry["having"] = having_filters
 
-        order_direction = 'descending' if order_desc else 'ascending'
+        order_direction = "descending" if order_desc else "ascending"
 
         if columns:
-            columns.append('__time')
-            del qry['post_aggregations']
-            del qry['aggregations']
-            qry['dimensions'] = columns
-            qry['metrics'] = []
-            qry['granularity'] = 'all'
-            qry['limit'] = row_limit
+            columns.append("__time")
+            del qry["post_aggregations"]
+            del qry["aggregations"]
+            qry["dimensions"] = columns
+            qry["metrics"] = []
+            qry["granularity"] = "all"
+            qry["limit"] = row_limit
             client.scan(**qry)
         elif len(groupby) == 0 and not having_filters:
-            logging.info('Running timeseries query for no groupby values')
-            del qry['dimensions']
+            logging.info("Running timeseries query for no groupby values")
+            del qry["dimensions"]
             client.timeseries(**qry)
-        elif (
-                not having_filters and
-                len(groupby) == 1 and
-                order_desc
-        ):
-            dim = list(qry.get('dimensions'))[0]
-            logging.info('Running two-phase topn query for dimension [{}]'.format(dim))
+        elif not having_filters and len(groupby) == 1 and order_desc:
+            dim = list(qry.get("dimensions"))[0]
+            logging.info("Running two-phase topn query for dimension [{}]".format(dim))
             pre_qry = deepcopy(qry)
             if timeseries_limit_metric:
                 order_by = utils.get_metric_name(timeseries_limit_metric)
                 aggs_dict, post_aggs_dict = DruidDatasource.metrics_and_post_aggs(
-                    [timeseries_limit_metric],
-                    metrics_dict)
+                    [timeseries_limit_metric], metrics_dict
+                )
                 if phase == 1:
-                    pre_qry['aggregations'].update(aggs_dict)
-                    pre_qry['post_aggregations'].update(post_aggs_dict)
+                    pre_qry["aggregations"].update(aggs_dict)
+                    pre_qry["post_aggregations"].update(post_aggs_dict)
                 else:
-                    pre_qry['aggregations'] = aggs_dict
-                    pre_qry['post_aggregations'] = post_aggs_dict
+                    pre_qry["aggregations"] = aggs_dict
+                    pre_qry["post_aggregations"] = post_aggs_dict
             else:
-                order_by = list(qry['aggregations'].keys())[0]
+                order_by = list(qry["aggregations"].keys())[0]
             # Limit on the number of timeseries, doing a two-phases query
-            pre_qry['granularity'] = 'all'
-            pre_qry['threshold'] = min(row_limit,
-                                       timeseries_limit or row_limit)
-            pre_qry['metric'] = order_by
-            pre_qry['dimension'] = self._dimensions_to_values(qry.get('dimensions'))[0]
-            del pre_qry['dimensions']
+            pre_qry["granularity"] = "all"
+            pre_qry["threshold"] = min(row_limit, timeseries_limit or row_limit)
+            pre_qry["metric"] = order_by
+            pre_qry["dimension"] = self._dimensions_to_values(qry.get("dimensions"))[0]
+            del pre_qry["dimensions"]
 
             client.topn(**pre_qry)
-            logging.info('Phase 1 Complete')
+            logging.info("Phase 1 Complete")
             if phase == 2:
-                query_str += '// Two phase query\n// Phase 1\n'
+                query_str += "// Two phase query\n// Phase 1\n"
             query_str += json.dumps(
-                client.query_builder.last_query.query_dict, indent=2)
-            query_str += '\n'
+                client.query_builder.last_query.query_dict, indent=2
+            )
+            query_str += "\n"
             if phase == 1:
                 return query_str
-            query_str += (
-                "// Phase 2 (built based on phase one's results)\n")
+            query_str += "// Phase 2 (built based on phase one's results)\n"
             df = client.export_pandas()
-            qry['filter'] = self._add_filter_from_pre_query_data(
-                df,
-                [pre_qry['dimension']],
-                filters)
-            qry['threshold'] = timeseries_limit or 1000
-            if row_limit and granularity == 'all':
-                qry['threshold'] = row_limit
-            qry['dimension'] = dim
-            del qry['dimensions']
-            qry['metric'] = list(qry['aggregations'].keys())[0]
+            qry["filter"] = self._add_filter_from_pre_query_data(
+                df, [pre_qry["dimension"]], filters
+            )
+            qry["threshold"] = timeseries_limit or 1000
+            if row_limit and granularity == "all":
+                qry["threshold"] = row_limit
+            qry["dimension"] = dim
+            del qry["dimensions"]
+            qry["metric"] = list(qry["aggregations"].keys())[0]
             client.topn(**qry)
-            logging.info('Phase 2 Complete')
+            logging.info("Phase 2 Complete")
         elif len(groupby) > 0 or having_filters:
             # If grouping on multiple fields or using a having filter
             # we have to force a groupby query
-            logging.info('Running groupby query for dimensions [{}]'.format(dimensions))
+            logging.info("Running groupby query for dimensions [{}]".format(dimensions))
             if timeseries_limit and is_timeseries:
-                logging.info('Running two-phase query for timeseries')
+                logging.info("Running two-phase query for timeseries")
 
                 pre_qry = deepcopy(qry)
-                pre_qry_dims = self._dimensions_to_values(qry['dimensions'])
+                pre_qry_dims = self._dimensions_to_values(qry["dimensions"])
 
                 # Can't use set on an array with dicts
                 # Use set with non-dict items only
                 non_dict_dims = list(
-                    set([x for x in pre_qry_dims if not isinstance(x, dict)]),
+                    set([x for x in pre_qry_dims if not isinstance(x, dict)])
                 )
                 dict_dims = [x for x in pre_qry_dims if isinstance(x, dict)]
-                pre_qry['dimensions'] = non_dict_dims + dict_dims
+                pre_qry["dimensions"] = non_dict_dims + dict_dims
 
                 order_by = None
                 if metrics:
@@ -1259,62 +1311,59 @@ class DruidDatasource(Model, BaseDatasource):
                 if timeseries_limit_metric:
                     order_by = utils.get_metric_name(timeseries_limit_metric)
                     aggs_dict, post_aggs_dict = DruidDatasource.metrics_and_post_aggs(
-                        [timeseries_limit_metric],
-                        metrics_dict)
+                        [timeseries_limit_metric], metrics_dict
+                    )
                     if phase == 1:
-                        pre_qry['aggregations'].update(aggs_dict)
-                        pre_qry['post_aggregations'].update(post_aggs_dict)
+                        pre_qry["aggregations"].update(aggs_dict)
+                        pre_qry["post_aggregations"].update(post_aggs_dict)
                     else:
-                        pre_qry['aggregations'] = aggs_dict
-                        pre_qry['post_aggregations'] = post_aggs_dict
+                        pre_qry["aggregations"] = aggs_dict
+                        pre_qry["post_aggregations"] = post_aggs_dict
 
                 # Limit on the number of timeseries, doing a two-phases query
-                pre_qry['granularity'] = 'all'
-                pre_qry['limit_spec'] = {
-                    'type': 'default',
-                    'limit': min(timeseries_limit, row_limit),
-                    'intervals': self.intervals_from_dttms(
-                        inner_from_dttm, inner_to_dttm),
-                    'columns': [{
-                        'dimension': order_by,
-                        'direction': order_direction,
-                    }],
+                pre_qry["granularity"] = "all"
+                pre_qry["limit_spec"] = {
+                    "type": "default",
+                    "limit": min(timeseries_limit, row_limit),
+                    "intervals": self.intervals_from_dttms(
+                        inner_from_dttm, inner_to_dttm
+                    ),
+                    "columns": [{"dimension": order_by, "direction": order_direction}],
                 }
                 client.groupby(**pre_qry)
-                logging.info('Phase 1 Complete')
-                query_str += '// Two phase query\n// Phase 1\n'
+                logging.info("Phase 1 Complete")
+                query_str += "// Two phase query\n// Phase 1\n"
                 query_str += json.dumps(
-                    client.query_builder.last_query.query_dict, indent=2)
-                query_str += '\n'
+                    client.query_builder.last_query.query_dict, indent=2
+                )
+                query_str += "\n"
                 if phase == 1:
                     return query_str
-                query_str += (
-                    "// Phase 2 (built based on phase one's results)\n")
+                query_str += "// Phase 2 (built based on phase one's results)\n"
                 df = client.export_pandas()
-                qry['filter'] = self._add_filter_from_pre_query_data(
-                    df,
-                    pre_qry['dimensions'],
-                    filters,
+                qry["filter"] = self._add_filter_from_pre_query_data(
+                    df, pre_qry["dimensions"], filters
                 )
-                qry['limit_spec'] = None
+                qry["limit_spec"] = None
             if row_limit:
                 dimension_values = self._dimensions_to_values(dimensions)
-                qry['limit_spec'] = {
-                    'type': 'default',
-                    'limit': row_limit,
-                    'columns': [{
-                        'dimension': (
-                            utils.get_metric_name(
-                                metrics[0],
-                            ) if metrics else dimension_values[0]
-                        ),
-                        'direction': order_direction,
-                    }],
+                qry["limit_spec"] = {
+                    "type": "default",
+                    "limit": row_limit,
+                    "columns": [
+                        {
+                            "dimension": (
+                                utils.get_metric_name(metrics[0])
+                                if metrics
+                                else dimension_values[0]
+                            ),
+                            "direction": order_direction,
+                        }
+                    ],
                 }
             client.groupby(**qry)
-            logging.info('Query Complete')
-        query_str += json.dumps(
-            client.query_builder.last_query.query_dict, indent=2)
+            logging.info("Query Complete")
+        query_str += json.dumps(client.query_builder.last_query.query_dict, indent=2)
         return query_str
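
To summarize the two-phase flow that most of the reformatting above touches: for a
single-dimension, descending-ordered query, phase 1 issues a topN with granularity
"all" to rank the dimension values, and phase 2 re-runs the real query filtered to
those values. A rough sketch of the phase-1 pre-query dict (all values are
illustrative, not taken from a real query; aggregations/post-aggregations omitted):

    pre_qry = {
        "datasource": "transactions",  # hypothetical datasource name
        "granularity": "all",          # collapse time for the ranking pass
        "dimension": "country",        # the single groupby dimension
        "metric": "sum__sales",        # order_by: the limit metric, or the first aggregation
        "threshold": 1000,             # min(row_limit, timeseries_limit or row_limit)
        "intervals": "2018-01-01T00:00:00/2018-02-01T00:00:00",
    }
    # client.topn(**pre_qry) is phase 1; its result rows are turned into an "or"
    # filter of Dimension == value terms and ANDed into the phase-2 query via
    # _add_filter_from_pre_query_data.
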
 
     @staticmethod
@@ -1329,82 +1378,79 @@ class DruidDatasource(Model, BaseDatasource):
         str instead of an object.
         """
         for col in groupby_cols:
-            df[col] = df[col].fillna('<NULL>').astype('unicode')
+            df[col] = df[col].fillna("<NULL>").astype("unicode")
         return df
 
     def query(self, query_obj):
         qry_start_dttm = datetime.now()
         client = self.cluster.get_pydruid_client()
-        query_str = self.get_query_str(
-            client=client, query_obj=query_obj, phase=2)
+        query_str = self.get_query_str(client=client, query_obj=query_obj, phase=2)
         df = client.export_pandas()
 
         if df is None or df.size == 0:
             return QueryResult(
                 df=pandas.DataFrame([]),
                 query=query_str,
-                duration=datetime.now() - qry_start_dttm)
+                duration=datetime.now() - qry_start_dttm,
+            )
 
-        df = self.homogenize_types(df, query_obj.get('groupby', []))
+        df = self.homogenize_types(df, query_obj.get("groupby", []))
         df.columns = [
-            DTTM_ALIAS if c in ('timestamp', '__time') else c
-            for c in df.columns
+            DTTM_ALIAS if c in ("timestamp", "__time") else c for c in df.columns
         ]
 
-        is_timeseries = query_obj['is_timeseries'] \
-            if 'is_timeseries' in query_obj else True
-        if (
-                not is_timeseries and
-                DTTM_ALIAS in df.columns):
+        is_timeseries = (
+            query_obj["is_timeseries"] if "is_timeseries" in query_obj else True
+        )
+        if not is_timeseries and DTTM_ALIAS in df.columns:
             del df[DTTM_ALIAS]
 
         # Reordering columns
         cols = []
         if DTTM_ALIAS in df.columns:
             cols += [DTTM_ALIAS]
-        cols += query_obj.get('groupby') or []
-        cols += query_obj.get('columns') or []
-        cols += query_obj.get('metrics') or []
+        cols += query_obj.get("groupby") or []
+        cols += query_obj.get("columns") or []
+        cols += query_obj.get("metrics") or []
 
         cols = utils.get_metric_names(cols)
         cols = [col for col in cols if col in df.columns]
         df = df[cols]
 
-        time_offset = DruidDatasource.time_offset(query_obj['granularity'])
+        time_offset = DruidDatasource.time_offset(query_obj["granularity"])
 
         def increment_timestamp(ts):
-            dt = utils.parse_human_datetime(ts).replace(
-                tzinfo=DRUID_TZ)
+            dt = utils.parse_human_datetime(ts).replace(tzinfo=DRUID_TZ)
             return dt + timedelta(milliseconds=time_offset)
+
         if DTTM_ALIAS in df.columns and time_offset:
             df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)
 
         return QueryResult(
-            df=df,
-            query=query_str,
-            duration=datetime.now() - qry_start_dttm)
+            df=df, query=query_str, duration=datetime.now() - qry_start_dttm
+        )
 
     @staticmethod
     def _create_extraction_fn(dim_spec):
         extraction_fn = None
-        if dim_spec and 'extractionFn' in dim_spec:
-            col = dim_spec['dimension']
-            fn = dim_spec['extractionFn']
-            ext_type = fn.get('type')
-            if ext_type == 'lookup' and fn['lookup'].get('type') == 'map':
-                replace_missing_values = fn.get('replaceMissingValueWith')
-                retain_missing_values = fn.get('retainMissingValue', False)
-                injective = fn.get('isOneToOne', False)
+        if dim_spec and "extractionFn" in dim_spec:
+            col = dim_spec["dimension"]
+            fn = dim_spec["extractionFn"]
+            ext_type = fn.get("type")
+            if ext_type == "lookup" and fn["lookup"].get("type") == "map":
+                replace_missing_values = fn.get("replaceMissingValueWith")
+                retain_missing_values = fn.get("retainMissingValue", False)
+                injective = fn.get("isOneToOne", False)
                 extraction_fn = MapLookupExtraction(
-                    fn['lookup']['map'],
+                    fn["lookup"]["map"],
                     replace_missing_values=replace_missing_values,
                     retain_missing_values=retain_missing_values,
                     injective=injective,
                 )
-            elif ext_type == 'regex':
-                extraction_fn = RegexExtraction(fn['expr'])
+            elif ext_type == "regex":
+                extraction_fn = RegexExtraction(fn["expr"])
             else:
-                raise Exception(_('Unsupported extraction function: ' + ext_type))
+                raise Exception(_("Unsupported extraction function: " + ext_type))
         return (col, extraction_fn)
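
A sketched dimension spec (hypothetical column and lookup names) and what the
helper above builds from it:

    from superset.connectors.druid.models import DruidDatasource

    dim_spec = {
        "type": "extraction",
        "dimension": "country_code",
        "outputName": "country",
        "extractionFn": {
            "type": "lookup",
            "lookup": {"type": "map", "map": {"US": "United States", "FR": "France"}},
            "retainMissingValue": True,
        },
    }
    col, extraction_fn = DruidDatasource._create_extraction_fn(dim_spec)
    # col == "country_code"; extraction_fn is a pydruid MapLookupExtraction built
    # from the map above, with retain_missing_values=True and injective=False.
    # A {"type": "regex", "expr": ...} extractionFn maps to RegexExtraction
    # instead; anything else raises.
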
 
     @classmethod
@@ -1412,13 +1458,14 @@ class DruidDatasource(Model, BaseDatasource):
         """Given Superset filter data structure, returns pydruid Filter(s)"""
         filters = None
         for flt in raw_filters:
-            col = flt.get('col')
-            op = flt.get('op')
-            eq = flt.get('val')
+            col = flt.get("col")
+            op = flt.get("op")
+            eq = flt.get("val")
             if (
-                    not col or
-                    not op or
-                    (eq is None and op not in ('IS NULL', 'IS NOT NULL'))):
+                not col
+                or not op
+                or (eq is None and op not in ("IS NULL", "IS NOT NULL"))
+            ):
                 continue
 
             # Check if this dimension uses an extraction function
@@ -1426,23 +1473,29 @@ class DruidDatasource(Model, BaseDatasource):
             column_def = columns_dict.get(col)
             dim_spec = column_def.dimension_spec if column_def else None
             extraction_fn = None
-            if dim_spec and 'extractionFn' in dim_spec:
+            if dim_spec and "extractionFn" in dim_spec:
                 (col, extraction_fn) = DruidDatasource._create_extraction_fn(dim_spec)
 
             cond = None
             is_numeric_col = col in num_cols
-            is_list_target = op in ('in', 'not in')
+            is_list_target = op in ("in", "not in")
             eq = cls.filter_values_handler(
-                eq, is_list_target=is_list_target,
-                target_column_is_numeric=is_numeric_col)
+                eq,
+                is_list_target=is_list_target,
+                target_column_is_numeric=is_numeric_col,
+            )
 
             # For these two ops, could have used Dimension,
             # but it doesn't support extraction functions
-            if op == '==':
-                cond = Filter(dimension=col, value=eq, extraction_function=extraction_fn)
-            elif op == '!=':
-                cond = ~Filter(dimension=col, value=eq, extraction_function=extraction_fn)
-            elif op in ('in', 'not in'):
+            if op == "==":
+                cond = Filter(
+                    dimension=col, value=eq, extraction_function=extraction_fn
+                )
+            elif op == "!=":
+                cond = ~Filter(
+                    dimension=col, value=eq, extraction_function=extraction_fn
+                )
+            elif op in ("in", "not in"):
                 fields = []
                 # ignore the filter if it has no value
                 if not len(eq):
@@ -1453,7 +1506,7 @@ class DruidDatasource(Model, BaseDatasource):
                     cond = Filter(
                         dimension=col,
                         values=eq,
-                        type='in',
+                        type="in",
                         extraction_function=extraction_fn,
                     )
                 elif len(eq) == 1:
@@ -1461,22 +1514,22 @@ class DruidDatasource(Model, BaseDatasource):
                 else:
                     for s in eq:
                         fields.append(Dimension(col) == s)
-                    cond = Filter(type='or', fields=fields)
-                if op == 'not in':
+                    cond = Filter(type="or", fields=fields)
+                if op == "not in":
                     cond = ~cond
-            elif op == 'regex':
+            elif op == "regex":
                 cond = Filter(
                     extraction_function=extraction_fn,
-                    type='regex',
+                    type="regex",
                     pattern=eq,
                     dimension=col,
                 )
 
             # For the ops below, could have used pydruid's Bound,
             # but it doesn't support extraction functions
-            elif op == '>=':
+            elif op == ">=":
                 cond = Filter(
-                    type='bound',
+                    type="bound",
                     extraction_function=extraction_fn,
                     dimension=col,
                     lowerStrict=False,
@@ -1485,9 +1538,9 @@ class DruidDatasource(Model, BaseDatasource):
                     upper=None,
                     alphaNumeric=is_numeric_col,
                 )
-            elif op == '<=':
+            elif op == "<=":
                 cond = Filter(
-                    type='bound',
+                    type="bound",
                     extraction_function=extraction_fn,
                     dimension=col,
                     lowerStrict=False,
@@ -1496,9 +1549,9 @@ class DruidDatasource(Model, BaseDatasource):
                     upper=eq,
                     alphaNumeric=is_numeric_col,
                 )
-            elif op == '>':
+            elif op == ">":
                 cond = Filter(
-                    type='bound',
+                    type="bound",
                     extraction_function=extraction_fn,
                     lowerStrict=True,
                     upperStrict=False,
@@ -1507,9 +1560,9 @@ class DruidDatasource(Model, BaseDatasource):
                     upper=None,
                     alphaNumeric=is_numeric_col,
                 )
-            elif op == '<':
+            elif op == "<":
                 cond = Filter(
-                    type='bound',
+                    type="bound",
                     extraction_function=extraction_fn,
                     upperStrict=True,
                     lowerStrict=False,
@@ -1518,16 +1571,13 @@ class DruidDatasource(Model, BaseDatasource):
                     upper=eq,
                     alphaNumeric=is_numeric_col,
                 )
-            elif op == 'IS NULL':
+            elif op == "IS NULL":
                 cond = Dimension(col) == None  # NOQA
-            elif op == 'IS NOT NULL':
+            elif op == "IS NOT NULL":
                 cond = Dimension(col) != None  # NOQA
 
             if filters:
-                filters = Filter(type='and', fields=[
-                    cond,
-                    filters,
-                ])
+                filters = Filter(type="and", fields=[cond, filters])
             else:
                 filters = cond
 
@@ -1535,34 +1585,30 @@ class DruidDatasource(Model, BaseDatasource):
 
     def _get_having_obj(self, col, op, eq):
         cond = None
-        if op == '==':
+        if op == "==":
             if col in self.column_names:
                 cond = DimSelector(dimension=col, value=eq)
             else:
                 cond = Aggregation(col) == eq
-        elif op == '>':
+        elif op == ">":
             cond = Aggregation(col) > eq
-        elif op == '<':
+        elif op == "<":
             cond = Aggregation(col) < eq
 
         return cond
 
     def get_having_filters(self, raw_filters):
         filters = None
-        reversed_op_map = {
-            '!=': '==',
-            '>=': '<',
-            '<=': '>',
-        }
+        reversed_op_map = {"!=": "==", ">=": "<", "<=": ">"}
 
         for flt in raw_filters:
-            if not all(f in flt for f in ['col', 'op', 'val']):
+            if not all(f in flt for f in ["col", "op", "val"]):
                 continue
-            col = flt['col']
-            op = flt['op']
-            eq = flt['val']
+            col = flt["col"]
+            op = flt["op"]
+            eq = flt["val"]
             cond = None
-            if op in ['==', '>', '<']:
+            if op in ["==", ">", "<"]:
                 cond = self._get_having_obj(col, op, eq)
             elif op in reversed_op_map:
                 cond = ~self._get_having_obj(col, reversed_op_map[op], eq)
@@ -1574,8 +1620,7 @@ class DruidDatasource(Model, BaseDatasource):
         return filters
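
For the having filters above, ">=", "<=" and "!=" are handled by negating the
reversed operator. A tiny sketch (filter values are hypothetical):

    flt = {"col": "count", "op": ">=", "val": 10}
    # reversed_op_map[">="] == "<", so the condition becomes
    #   ~self._get_having_obj("count", "<", 10)
    # i.e. NOT (Aggregation("count") < 10)
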
 
     @classmethod
-    def query_datasources_by_name(
-            cls, session, database, datasource_name, schema=None):
+    def query_datasources_by_name(cls, session, database, datasource_name, schema=None):
         return (
             session.query(cls)
             .filter_by(cluster_name=database.id)
@@ -1586,13 +1631,10 @@ class DruidDatasource(Model, BaseDatasource):
     def external_metadata(self):
         self.merge_flag = True
         return [
-            {
-                'name': k,
-                'type': v.get('type'),
-            }
+            {"name": k, "type": v.get("type")}
             for k, v in self.latest_metadata().items()
         ]
 
 
-sa.event.listen(DruidDatasource, 'after_insert', security_manager.set_perm)
-sa.event.listen(DruidDatasource, 'after_update', security_manager.set_perm)
+sa.event.listen(DruidDatasource, "after_insert", security_manager.set_perm)
+sa.event.listen(DruidDatasource, "after_update", security_manager.set_perm)
diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py
index 18c1aef..2752f2e 100644
--- a/superset/connectors/druid/views.py
+++ b/superset/connectors/druid/views.py
@@ -15,9 +15,14 @@ from superset.connectors.base.views import DatasourceModelView
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.utils import core as utils
 from superset.views.base import (
-    BaseSupersetView, DatasourceFilter, DeleteMixin,
-    get_datasource_exist_error_msg, ListWidgetWithCheckboxes, SupersetModelView,
-    validate_json, YamlExportMixin,
+    BaseSupersetView,
+    DatasourceFilter,
+    DeleteMixin,
+    get_datasource_exist_error_msg,
+    ListWidgetWithCheckboxes,
+    SupersetModelView,
+    validate_json,
+    YamlExportMixin,
 )
 from . import models
 
@@ -25,47 +30,67 @@ from . import models
 class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     datamodel = SQLAInterface(models.DruidColumn)
 
-    list_title = _('List Druid Column')
-    show_title = _('Show Druid Column')
-    add_title = _('Add Druid Column')
-    edit_title = _('Edit Druid Column')
+    list_title = _("List Druid Column")
+    show_title = _("Show Druid Column")
+    add_title = _("Add Druid Column")
+    edit_title = _("Edit Druid Column")
 
     list_widget = ListWidgetWithCheckboxes
 
     edit_columns = [
-        'column_name', 'verbose_name', 'description', 'dimension_spec_json', 'datasource',
-        'groupby', 'filterable', 'count_distinct', 'sum', 'min', 'max']
+        "column_name",
+        "verbose_name",
+        "description",
+        "dimension_spec_json",
+        "datasource",
+        "groupby",
+        "filterable",
+        "count_distinct",
+        "sum",
+        "min",
+        "max",
+    ]
     add_columns = edit_columns
     list_columns = [
-        'column_name', 'verbose_name', 'type', 'groupby', 'filterable', 'count_distinct',
-        'sum', 'min', 'max']
+        "column_name",
+        "verbose_name",
+        "type",
+        "groupby",
+        "filterable",
+        "count_distinct",
+        "sum",
+        "min",
+        "max",
+    ]
     can_delete = False
     page_size = 500
     label_columns = {
-        'column_name': _('Column'),
-        'type': _('Type'),
-        'datasource': _('Datasource'),
-        'groupby': _('Groupable'),
-        'filterable': _('Filterable'),
-        'count_distinct': _('Count Distinct'),
-        'sum': _('Sum'),
-        'min': _('Min'),
-        'max': _('Max'),
-        'verbose_name': _('Verbose Name'),
-        'description': _('Description'),
+        "column_name": _("Column"),
+        "type": _("Type"),
+        "datasource": _("Datasource"),
+        "groupby": _("Groupable"),
+        "filterable": _("Filterable"),
+        "count_distinct": _("Count Distinct"),
+        "sum": _("Sum"),
+        "min": _("Min"),
+        "max": _("Max"),
+        "verbose_name": _("Verbose Name"),
+        "description": _("Description"),
     }
     description_columns = {
-        'filterable': _(
-            'Whether this column is exposed in the `Filters` section '
-            'of the explore view.'),
-        'dimension_spec_json': utils.markdown(
-            'this field can be used to specify  '
-            'a `dimensionSpec` as documented [here]'
-            '(http://druid.io/docs/latest/querying/dimensionspecs.html). '
-            'Make sure to input valid JSON and that the '
-            '`outputName` matches the `column_name` defined '
-            'above.',
-            True),
+        "filterable": _(
+            "Whether this column is exposed in the `Filters` section "
+            "of the explore view."
+        ),
+        "dimension_spec_json": utils.markdown(
+            "this field can be used to specify  "
+            "a `dimensionSpec` as documented [here]"
+            "(http://druid.io/docs/latest/querying/dimensionspecs.html). "
+            "Make sure to input valid JSON and that the "
+            "`outputName` matches the `column_name` defined "
+            "above.",
+            True,
+        ),
     }
 
     def pre_update(self, col):
@@ -75,18 +100,20 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
             try:
                 dimension_spec = json.loads(col.dimension_spec_json)
             except ValueError as e:
-                raise ValueError('Invalid Dimension Spec JSON: ' + str(e))
+                raise ValueError("Invalid Dimension Spec JSON: " + str(e))
             if not isinstance(dimension_spec, dict):
-                raise ValueError('Dimension Spec must be a JSON object')
-            if 'outputName' not in dimension_spec:
-                raise ValueError('Dimension Spec does not contain `outputName`')
-            if 'dimension' not in dimension_spec:
-                raise ValueError('Dimension Spec is missing `dimension`')
+                raise ValueError("Dimension Spec must be a JSON object")
+            if "outputName" not in dimension_spec:
+                raise ValueError("Dimension Spec does not contain `outputName`")
+            if "dimension" not in dimension_spec:
+                raise ValueError("Dimension Spec is missing `dimension`")
             # `outputName` should be the same as the `column_name`
-            if dimension_spec['outputName'] != col.column_name:
+            if dimension_spec["outputName"] != col.column_name:
                 raise ValueError(
-                    '`outputName` [{}] unequal to `column_name` [{}]'
-                    .format(dimension_spec['outputName'], col.column_name))
+                    "`outputName` [{}] unequal to `column_name` [{}]".format(
+                        dimension_spec["outputName"], col.column_name
+                    )
+                )
 
     def post_update(self, col):
         col.refresh_metrics()
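
For the pre_update validation above, a dimension spec JSON (hypothetical names)
that passes all the checks for a column whose column_name is "country": it is a
JSON object, it carries both "dimension" and "outputName", and the outputName
matches the column name.

    {
        "type": "extraction",
        "dimension": "country_iso",
        "outputName": "country",
        "extractionFn": {"type": "regex", "expr": "(.*)"},
    }
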
@@ -101,49 +128,58 @@ appbuilder.add_view_no_menu(DruidColumnInlineView)
 class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     datamodel = SQLAInterface(models.DruidMetric)
 
-    list_title = _('List Druid Metric')
-    show_title = _('Show Druid Metric')
-    add_title = _('Add Druid Metric')
-    edit_title = _('Edit Druid Metric')
+    list_title = _("List Druid Metric")
+    show_title = _("Show Druid Metric")
+    add_title = _("Add Druid Metric")
+    edit_title = _("Edit Druid Metric")
 
-    list_columns = ['metric_name', 'verbose_name', 'metric_type']
+    list_columns = ["metric_name", "verbose_name", "metric_type"]
     edit_columns = [
-        'metric_name', 'description', 'verbose_name', 'metric_type', 'json',
-        'datasource', 'd3format', 'is_restricted', 'warning_text']
+        "metric_name",
+        "description",
+        "verbose_name",
+        "metric_type",
+        "json",
+        "datasource",
+        "d3format",
+        "is_restricted",
+        "warning_text",
+    ]
     add_columns = edit_columns
     page_size = 500
-    validators_columns = {
-        'json': [validate_json],
-    }
+    validators_columns = {"json": [validate_json]}
     description_columns = {
-        'metric_type': utils.markdown(
-            'use `postagg` as the metric type if you are defining a '
-            '[Druid Post Aggregation]'
-            '(http://druid.io/docs/latest/querying/post-aggregations.html)',
-            True),
-        'is_restricted': _('Whether access to this metric is restricted '
-                           'to certain roles. Only roles with the permission '
-                           "'metric access on XXX (the name of this metric)' "
-                           'are allowed to access this metric'),
+        "metric_type": utils.markdown(
+            "use `postagg` as the metric type if you are defining a "
+            "[Druid Post Aggregation]"
+            "(http://druid.io/docs/latest/querying/post-aggregations.html)",
+            True,
+        ),
+        "is_restricted": _(
+            "Whether access to this metric is restricted "
+            "to certain roles. Only roles with the permission "
+            "'metric access on XXX (the name of this metric)' "
+            "are allowed to access this metric"
+        ),
     }
     label_columns = {
-        'metric_name': _('Metric'),
-        'description': _('Description'),
-        'verbose_name': _('Verbose Name'),
-        'metric_type': _('Type'),
-        'json': _('JSON'),
-        'datasource': _('Druid Datasource'),
-        'warning_text': _('Warning Message'),
-        'is_restricted': _('Is Restricted'),
+        "metric_name": _("Metric"),
+        "description": _("Description"),
+        "verbose_name": _("Verbose Name"),
+        "metric_type": _("Type"),
+        "json": _("JSON"),
+        "datasource": _("Druid Datasource"),
+        "warning_text": _("Warning Message"),
+        "is_restricted": _("Is Restricted"),
     }
 
     def post_add(self, metric):
         if metric.is_restricted:
-            security_manager.merge_perm('metric_access', metric.get_perm())
+            security_manager.merge_perm("metric_access", metric.get_perm())
 
     def post_update(self, metric):
         if metric.is_restricted:
-            security_manager.merge_perm('metric_access', metric.get_perm())
+            security_manager.merge_perm("metric_access", metric.get_perm())
 
 
 appbuilder.add_view_no_menu(DruidMetricInlineView)
@@ -152,36 +188,41 @@ appbuilder.add_view_no_menu(DruidMetricInlineView)
 class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin):  # noqa
     datamodel = SQLAInterface(models.DruidCluster)
 
-    list_title = _('List Druid Cluster')
-    show_title = _('Show Druid Cluster')
-    add_title = _('Add Druid Cluster')
-    edit_title = _('Edit Druid Cluster')
+    list_title = _("List Druid Cluster")
+    show_title = _("Show Druid Cluster")
+    add_title = _("Add Druid Cluster")
+    edit_title = _("Edit Druid Cluster")
 
     add_columns = [
-        'verbose_name', 'broker_host', 'broker_port',
-        'broker_endpoint', 'cache_timeout', 'cluster_name',
+        "verbose_name",
+        "broker_host",
+        "broker_port",
+        "broker_endpoint",
+        "cache_timeout",
+        "cluster_name",
     ]
     edit_columns = add_columns
-    list_columns = ['cluster_name', 'metadata_last_refreshed']
-    search_columns = ('cluster_name',)
+    list_columns = ["cluster_name", "metadata_last_refreshed"]
+    search_columns = ("cluster_name",)
     label_columns = {
-        'cluster_name': _('Cluster'),
-        'broker_host': _('Broker Host'),
-        'broker_port': _('Broker Port'),
-        'broker_endpoint': _('Broker Endpoint'),
-        'verbose_name': _('Verbose Name'),
-        'cache_timeout': _('Cache Timeout'),
-        'metadata_last_refreshed': _('Metadata Last Refreshed'),
+        "cluster_name": _("Cluster"),
+        "broker_host": _("Broker Host"),
+        "broker_port": _("Broker Port"),
+        "broker_endpoint": _("Broker Endpoint"),
+        "verbose_name": _("Verbose Name"),
+        "cache_timeout": _("Cache Timeout"),
+        "metadata_last_refreshed": _("Metadata Last Refreshed"),
     }
     description_columns = {
-        'cache_timeout': _(
-            'Duration (in seconds) of the caching timeout for this cluster. '
-            'A timeout of 0 indicates that the cache never expires. '
-            'Note this defaults to the global timeout if undefined.'),
+        "cache_timeout": _(
+            "Duration (in seconds) of the caching timeout for this cluster. "
+            "A timeout of 0 indicates that the cache never expires. "
+            "Note this defaults to the global timeout if undefined."
+        )
     }
 
     def pre_add(self, cluster):
-        security_manager.merge_perm('database_access', cluster.perm)
+        security_manager.merge_perm("database_access", cluster.perm)
 
     def pre_update(self, cluster):
         self.pre_add(cluster)
@@ -192,107 +233,115 @@ class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin):  #
 
 appbuilder.add_view(
     DruidClusterModelView,
-    name='Druid Clusters',
-    label=__('Druid Clusters'),
-    icon='fa-cubes',
-    category='Sources',
-    category_label=__('Sources'),
-    category_icon='fa-database',
+    name="Druid Clusters",
+    label=__("Druid Clusters"),
+    icon="fa-cubes",
+    category="Sources",
+    category_label=__("Sources"),
+    category_icon="fa-database",
 )
 
 
-class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
+class DruidDatasourceModelView(
+    DatasourceModelView, DeleteMixin, YamlExportMixin
+):  # noqa
     datamodel = SQLAInterface(models.DruidDatasource)
 
-    list_title = _('List Druid Datasource')
-    show_title = _('Show Druid Datasource')
-    add_title = _('Add Druid Datasource')
-    edit_title = _('Edit Druid Datasource')
+    list_title = _("List Druid Datasource")
+    show_title = _("Show Druid Datasource")
+    add_title = _("Add Druid Datasource")
+    edit_title = _("Edit Druid Datasource")
 
-    list_columns = [
-        'datasource_link', 'cluster', 'changed_by_', 'modified']
-    order_columns = ['datasource_link', 'modified']
+    list_columns = ["datasource_link", "cluster", "changed_by_", "modified"]
+    order_columns = ["datasource_link", "modified"]
     related_views = [DruidColumnInlineView, DruidMetricInlineView]
     edit_columns = [
-        'datasource_name', 'cluster', 'description', 'owner',
-        'is_hidden',
-        'filter_select_enabled', 'fetch_values_from',
-        'default_endpoint', 'offset', 'cache_timeout']
-    search_columns = (
-        'datasource_name', 'cluster', 'description', 'owner',
-    )
+        "datasource_name",
+        "cluster",
+        "description",
+        "owner",
+        "is_hidden",
+        "filter_select_enabled",
+        "fetch_values_from",
+        "default_endpoint",
+        "offset",
+        "cache_timeout",
+    ]
+    search_columns = ("datasource_name", "cluster", "description", "owner")
     add_columns = edit_columns
-    show_columns = add_columns + ['perm', 'slices']
+    show_columns = add_columns + ["perm", "slices"]
     page_size = 500
-    base_order = ('datasource_name', 'asc')
+    base_order = ("datasource_name", "asc")
     description_columns = {
-        'slices': _(
-            'The list of charts associated with this table. By '
-            'altering this datasource, you may change how these associated '
-            'charts behave. '
-            'Also note that charts need to point to a datasource, so '
-            'this form will fail at saving if removing charts from a '
-            'datasource. If you want to change the datasource for a chart, '
-            "overwrite the chart from the 'explore view'"),
-        'offset': _('Timezone offset (in hours) for this datasource'),
-        'description': Markup(
+        "slices": _(
+            "The list of charts associated with this table. By "
+            "altering this datasource, you may change how these associated "
+            "charts behave. "
+            "Also note that charts need to point to a datasource, so "
+            "this form will fail at saving if removing charts from a "
+            "datasource. If you want to change the datasource for a chart, "
+            "overwrite the chart from the 'explore view'"
+        ),
+        "offset": _("Timezone offset (in hours) for this datasource"),
+        "description": Markup(
             'Supports <a href="'
-            'https://daringfireball.net/projects/markdown/">markdown</a>'),
-        'fetch_values_from': _(
-            'Time expression to use as a predicate when retrieving '
-            'distinct values to populate the filter component. '
-            'Only applies when `Enable Filter Select` is on. If '
-            'you enter `7 days ago`, the distinct list of values in '
-            'the filter will be populated based on the distinct value over '
-            'the past week'),
-        'filter_select_enabled': _(
+            'https://daringfireball.net/projects/markdown/">markdown</a>'
+        ),
+        "fetch_values_from": _(
+            "Time expression to use as a predicate when retrieving "
+            "distinct values to populate the filter component. "
+            "Only applies when `Enable Filter Select` is on. If "
+            "you enter `7 days ago`, the distinct list of values in "
+            "the filter will be populated based on the distinct value over "
+            "the past week"
+        ),
+        "filter_select_enabled": _(
             "Whether to populate the filter's dropdown in the explore "
             "view's filter section with a list of distinct values fetched "
-            'from the backend on the fly'),
-        'default_endpoint': _(
-            'Redirects to this endpoint when clicking on the datasource '
-            'from the datasource list'),
-        'cache_timeout': _(
-            'Duration (in seconds) of the caching timeout for this datasource. '
-            'A timeout of 0 indicates that the cache never expires. '
-            'Note this defaults to the cluster timeout if undefined.'),
+            "from the backend on the fly"
+        ),
+        "default_endpoint": _(
+            "Redirects to this endpoint when clicking on the datasource "
+            "from the datasource list"
+        ),
+        "cache_timeout": _(
+            "Duration (in seconds) of the caching timeout for this datasource. "
+            "A timeout of 0 indicates that the cache never expires. "
+            "Note this defaults to the cluster timeout if undefined."
+        ),
     }
-    base_filters = [['id', DatasourceFilter, lambda: []]]
+    base_filters = [["id", DatasourceFilter, lambda: []]]
     label_columns = {
-        'slices': _('Associated Charts'),
-        'datasource_link': _('Data Source'),
-        'cluster': _('Cluster'),
-        'description': _('Description'),
-        'owner': _('Owner'),
-        'is_hidden': _('Is Hidden'),
-        'filter_select_enabled': _('Enable Filter Select'),
-        'default_endpoint': _('Default Endpoint'),
-        'offset': _('Time Offset'),
-        'cache_timeout': _('Cache Timeout'),
-        'datasource_name': _('Datasource Name'),
-        'fetch_values_from': _('Fetch Values From'),
-        'changed_by_': _('Changed By'),
-        'modified': _('Modified'),
+        "slices": _("Associated Charts"),
+        "datasource_link": _("Data Source"),
+        "cluster": _("Cluster"),
+        "description": _("Description"),
+        "owner": _("Owner"),
+        "is_hidden": _("Is Hidden"),
+        "filter_select_enabled": _("Enable Filter Select"),
+        "default_endpoint": _("Default Endpoint"),
+        "offset": _("Time Offset"),
+        "cache_timeout": _("Cache Timeout"),
+        "datasource_name": _("Datasource Name"),
+        "fetch_values_from": _("Fetch Values From"),
+        "changed_by_": _("Changed By"),
+        "modified": _("Modified"),
     }
 
     def pre_add(self, datasource):
         with db.session.no_autoflush:
-            query = (
-                db.session.query(models.DruidDatasource)
-                .filter(models.DruidDatasource.datasource_name ==
-                        datasource.datasource_name,
-                        models.DruidDatasource.cluster_name ==
-                        datasource.cluster.id)
+            query = db.session.query(models.DruidDatasource).filter(
+                models.DruidDatasource.datasource_name == datasource.datasource_name,
+                models.DruidDatasource.cluster_name == datasource.cluster.id,
             )
             if db.session.query(query.exists()).scalar():
-                raise Exception(get_datasource_exist_error_msg(
-                    datasource.full_name))
+                raise Exception(get_datasource_exist_error_msg(datasource.full_name))
 
     def post_add(self, datasource):
         datasource.refresh_metrics()
-        security_manager.merge_perm('datasource_access', datasource.get_perm())
+        security_manager.merge_perm("datasource_access", datasource.get_perm())
         if datasource.schema:
-            security_manager.merge_perm('schema_access', datasource.schema_perm)
+            security_manager.merge_perm("schema_access", datasource.schema_perm)
 
     def post_update(self, datasource):
         self.post_add(datasource)
@@ -303,22 +352,23 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin
 
 appbuilder.add_view(
     DruidDatasourceModelView,
-    'Druid Datasources',
-    label=__('Druid Datasources'),
-    category='Sources',
-    category_label=__('Sources'),
-    icon='fa-cube')
+    "Druid Datasources",
+    label=__("Druid Datasources"),
+    category="Sources",
+    category_label=__("Sources"),
+    icon="fa-cube",
+)
 
 
 class Druid(BaseSupersetView):
     """The base views for Superset!"""
 
     @has_access
-    @expose('/refresh_datasources/')
+    @expose("/refresh_datasources/")
     def refresh_datasources(self, refreshAll=True):
         """endpoint that refreshes druid datasources metadata"""
         session = db.session()
-        DruidCluster = ConnectorRegistry.sources['druid'].cluster_class
+        DruidCluster = ConnectorRegistry.sources["druid"].cluster_class
         for cluster in session.query(DruidCluster).all():
             cluster_name = cluster.cluster_name
             try:
@@ -326,20 +376,22 @@ class Druid(BaseSupersetView):
             except Exception as e:
                 flash(
                     "Error while processing cluster '{}'\n{}".format(
-                        cluster_name, utils.error_msg_from_exception(e)),
-                    'danger')
+                        cluster_name, utils.error_msg_from_exception(e)
+                    ),
+                    "danger",
+                )
                 logging.exception(e)
-                return redirect('/druidclustermodelview/list/')
+                return redirect("/druidclustermodelview/list/")
             cluster.metadata_last_refreshed = datetime.now()
             flash(
-                _('Refreshed metadata from cluster [{}]').format(
-                    cluster.cluster_name),
-                'info')
+                _("Refreshed metadata from cluster [{}]").format(cluster.cluster_name),
+                "info",
+            )
         session.commit()
-        return redirect('/druiddatasourcemodelview/list/')
+        return redirect("/druiddatasourcemodelview/list/")
 
     @has_access
-    @expose('/scan_new_datasources/')
+    @expose("/scan_new_datasources/")
     def scan_new_datasources(self):
         """
         Calling this endpoint will cause a scan for new
@@ -351,21 +403,23 @@ class Druid(BaseSupersetView):
 appbuilder.add_view_no_menu(Druid)
 
 appbuilder.add_link(
-    'Scan New Datasources',
-    label=__('Scan New Datasources'),
-    href='/druid/scan_new_datasources/',
-    category='Sources',
-    category_label=__('Sources'),
-    category_icon='fa-database',
-    icon='fa-refresh')
+    "Scan New Datasources",
+    label=__("Scan New Datasources"),
+    href="/druid/scan_new_datasources/",
+    category="Sources",
+    category_label=__("Sources"),
+    category_icon="fa-database",
+    icon="fa-refresh",
+)
 appbuilder.add_link(
-    'Refresh Druid Metadata',
-    label=__('Refresh Druid Metadata'),
-    href='/druid/refresh_datasources/',
-    category='Sources',
-    category_label=__('Sources'),
-    category_icon='fa-database',
-    icon='fa-cog')
+    "Refresh Druid Metadata",
+    label=__("Refresh Druid Metadata"),
+    href="/druid/refresh_datasources/",
+    category="Sources",
+    category_label=__("Sources"),
+    category_icon="fa-database",
+    icon="fa-cog",
+)
 
 
-appbuilder.add_separator('Sources')
+appbuilder.add_separator("Sources")
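
The druid/views.py hunks above are purely mechanical: black normalizes string
quotes to double quotes (except where that would force extra escaping, as in the
HTML help strings that keep their single quotes), and rewrites any call, list, or
dict that no longer fits on one 88-character line with one element per line and a
trailing comma. A minimal sketch of that default behaviour on a made-up
label_columns dict -- illustrative only, not a snippet from this commit:

    # Hand-formatted input: single quotes, everything packed onto one long line.
    label_columns = {'cluster_name': 'Cluster', 'broker_host': 'Broker Host', 'broker_port': 'Broker Port', 'cache_timeout': 'Cache Timeout'}

    # After black with default settings: double quotes, and because the line
    # exceeds 88 characters the dict is exploded one item per line with a
    # trailing comma, so later additions show up as one-line diffs.
    label_columns = {
        "cluster_name": "Cluster",
        "broker_host": "Broker Host",
        "broker_port": "Broker Port",
        "cache_timeout": "Cache Timeout",
    }
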
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index cf22add..1bc314e 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -8,8 +8,18 @@ from flask_babel import lazy_gettext as _
 import pandas as pd
 import sqlalchemy as sa
 from sqlalchemy import (
-    and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, or_,
-    select, String, Text,
+    and_,
+    asc,
+    Boolean,
+    Column,
+    DateTime,
+    desc,
+    ForeignKey,
+    Integer,
+    or_,
+    select,
+    String,
+    Text,
 )
 from sqlalchemy.exc import CompileError
 from sqlalchemy.orm import backref, relationship
@@ -40,25 +50,21 @@ class AnnotationDatasource(BaseDatasource):
         df = None
         error_message = None
         qry = db.session.query(Annotation)
-        qry = qry.filter(Annotation.layer_id == query_obj['filter'][0]['val'])
-        if query_obj['from_dttm']:
-            qry = qry.filter(Annotation.start_dttm >= query_obj['from_dttm'])
-        if query_obj['to_dttm']:
-            qry = qry.filter(Annotation.end_dttm <= query_obj['to_dttm'])
+        qry = qry.filter(Annotation.layer_id == query_obj["filter"][0]["val"])
+        if query_obj["from_dttm"]:
+            qry = qry.filter(Annotation.start_dttm >= query_obj["from_dttm"])
+        if query_obj["to_dttm"]:
+            qry = qry.filter(Annotation.end_dttm <= query_obj["to_dttm"])
         status = utils.QueryStatus.SUCCESS
         try:
             df = pd.read_sql_query(qry.statement, db.engine)
         except Exception as e:
             status = utils.QueryStatus.FAILED
             logging.exception(e)
-            error_message = (
-                utils.error_msg_from_exception(e))
+            error_message = utils.error_msg_from_exception(e)
         return QueryResult(
-            status=status,
-            df=df,
-            duration=0,
-            query='',
-            error_message=error_message)
+            status=status, df=df, duration=0, query="", error_message=error_message
+        )
 
     def get_query_str(self, query_obj):
         raise NotImplementedError()
@@ -71,32 +77,47 @@ class TableColumn(Model, BaseColumn):
 
     """ORM object for table columns, each table can have multiple columns"""
 
-    __tablename__ = 'table_columns'
-    __table_args__ = (UniqueConstraint('table_id', 'column_name'),)
-    table_id = Column(Integer, ForeignKey('tables.id'))
+    __tablename__ = "table_columns"
+    __table_args__ = (UniqueConstraint("table_id", "column_name"),)
+    table_id = Column(Integer, ForeignKey("tables.id"))
     table = relationship(
-        'SqlaTable',
-        backref=backref('columns', cascade='all, delete-orphan'),
-        foreign_keys=[table_id])
+        "SqlaTable",
+        backref=backref("columns", cascade="all, delete-orphan"),
+        foreign_keys=[table_id],
+    )
     is_dttm = Column(Boolean, default=False)
-    expression = Column(Text, default='')
+    expression = Column(Text, default="")
     python_date_format = Column(String(255))
     database_expression = Column(String(255))
 
     export_fields = (
-        'table_id', 'column_name', 'verbose_name', 'is_dttm', 'is_active',
-        'type', 'groupby', 'count_distinct', 'sum', 'avg', 'max', 'min',
-        'filterable', 'expression', 'description', 'python_date_format',
-        'database_expression',
+        "table_id",
+        "column_name",
+        "verbose_name",
+        "is_dttm",
+        "is_active",
+        "type",
+        "groupby",
+        "count_distinct",
+        "sum",
+        "avg",
+        "max",
+        "min",
+        "filterable",
+        "expression",
+        "description",
+        "python_date_format",
+        "database_expression",
     )
 
-    update_from_object_fields = [
-        s for s in export_fields if s not in ('table_id',)]
-    export_parent = 'table'
+    update_from_object_fields = [s for s in export_fields if s not in ("table_id",)]
+    export_parent = "table"
 
     def get_sqla_col(self, label=None):
         db_engine_spec = self.table.database.db_engine_spec
-        label = db_engine_spec.make_label_compatible(label if label else self.column_name)
+        label = db_engine_spec.make_label_compatible(
+            label if label else self.column_name
+        )
         if not self.expression:
             col = column(self.column_name).label(label)
         else:
@@ -108,7 +129,7 @@ class TableColumn(Model, BaseColumn):
         return self.table
 
     def get_time_filter(self, start_dttm, end_dttm):
-        col = self.get_sqla_col(label='__time')
+        col = self.get_sqla_col(label="__time")
         l = []  # noqa: E741
         if start_dttm:
             l.append(col >= text(self.dttm_sql_literal(start_dttm)))
@@ -119,7 +140,7 @@ class TableColumn(Model, BaseColumn):
     def get_timestamp_expression(self, time_grain):
         """Getting the time component of the query"""
         pdf = self.python_date_format
-        is_epoch = pdf in ('epoch_s', 'epoch_ms')
+        is_epoch = pdf in ("epoch_s", "epoch_ms")
         if not self.expression and not time_grain and not is_epoch:
             return column(self.column_name, type_=DateTime).label(utils.DTTM_ALIAS)
 
@@ -127,9 +148,9 @@ class TableColumn(Model, BaseColumn):
         if is_epoch:
             # if epoch, translate to DATE using db specific conf
             db_spec = self.table.database.db_engine_spec
-            if pdf == 'epoch_s':
+            if pdf == "epoch_s":
                 expr = db_spec.epoch_to_dttm().format(col=expr)
-            elif pdf == 'epoch_ms':
+            elif pdf == "epoch_ms":
                 expr = db_spec.epoch_ms_to_dttm().format(col=expr)
         if time_grain:
             grain = self.table.database.grains_dict().get(time_grain)
@@ -140,9 +161,15 @@ class TableColumn(Model, BaseColumn):
     @classmethod
     def import_obj(cls, i_column):
         def lookup_obj(lookup_column):
-            return db.session.query(TableColumn).filter(
-                TableColumn.table_id == lookup_column.table_id,
-                TableColumn.column_name == lookup_column.column_name).first()
+            return (
+                db.session.query(TableColumn)
+                .filter(
+                    TableColumn.table_id == lookup_column.table_id,
+                    TableColumn.column_name == lookup_column.column_name,
+                )
+                .first()
+            )
+
         return import_datasource.import_simple_obj(db.session, i_column, lookup_obj)
 
     def dttm_sql_literal(self, dttm):
@@ -156,17 +183,16 @@ class TableColumn(Model, BaseColumn):
         """
         tf = self.python_date_format
         if self.database_expression:
-            return self.database_expression.format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
+            return self.database_expression.format(dttm.strftime("%Y-%m-%d %H:%M:%S"))
         elif tf:
-            if tf == 'epoch_s':
+            if tf == "epoch_s":
                 return str((dttm - datetime(1970, 1, 1)).total_seconds())
-            elif tf == 'epoch_ms':
+            elif tf == "epoch_ms":
                 return str((dttm - datetime(1970, 1, 1)).total_seconds() * 1000.0)
             return "'{}'".format(dttm.strftime(tf))
         else:
-            s = self.table.database.db_engine_spec.convert_dttm(
-                self.type or '', dttm)
-            return s or "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S.%f'))
+            s = self.table.database.db_engine_spec.convert_dttm(self.type or "", dttm)
+            return s or "'{}'".format(dttm.strftime("%Y-%m-%d %H:%M:%S.%f"))
 
     def get_metrics(self):
         # TODO deprecate, this is not needed since MetricsControl
@@ -174,35 +200,45 @@ class TableColumn(Model, BaseColumn):
         M = SqlMetric  # noqa
         quoted = self.column_name
         if self.sum:
-            metrics.append(M(
-                metric_name='sum__' + self.column_name,
-                metric_type='sum',
-                expression='SUM({})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name="sum__" + self.column_name,
+                    metric_type="sum",
+                    expression="SUM({})".format(quoted),
+                )
+            )
         if self.avg:
-            metrics.append(M(
-                metric_name='avg__' + self.column_name,
-                metric_type='avg',
-                expression='AVG({})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name="avg__" + self.column_name,
+                    metric_type="avg",
+                    expression="AVG({})".format(quoted),
+                )
+            )
         if self.max:
-            metrics.append(M(
-                metric_name='max__' + self.column_name,
-                metric_type='max',
-                expression='MAX({})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name="max__" + self.column_name,
+                    metric_type="max",
+                    expression="MAX({})".format(quoted),
+                )
+            )
         if self.min:
-            metrics.append(M(
-                metric_name='min__' + self.column_name,
-                metric_type='min',
-                expression='MIN({})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name="min__" + self.column_name,
+                    metric_type="min",
+                    expression="MIN({})".format(quoted),
+                )
+            )
         if self.count_distinct:
-            metrics.append(M(
-                metric_name='count_distinct__' + self.column_name,
-                metric_type='count_distinct',
-                expression='COUNT(DISTINCT {})'.format(quoted),
-            ))
+            metrics.append(
+                M(
+                    metric_name="count_distinct__" + self.column_name,
+                    metric_type="count_distinct",
+                    expression="COUNT(DISTINCT {})".format(quoted),
+                )
+            )
         return {m.metric_name: m for m in metrics}
 
 
@@ -210,33 +246,48 @@ class SqlMetric(Model, BaseMetric):
 
     """ORM object for metrics, each table can have multiple metrics"""
 
-    __tablename__ = 'sql_metrics'
-    __table_args__ = (UniqueConstraint('table_id', 'metric_name'),)
-    table_id = Column(Integer, ForeignKey('tables.id'))
+    __tablename__ = "sql_metrics"
+    __table_args__ = (UniqueConstraint("table_id", "metric_name"),)
+    table_id = Column(Integer, ForeignKey("tables.id"))
     table = relationship(
-        'SqlaTable',
-        backref=backref('metrics', cascade='all, delete-orphan'),
-        foreign_keys=[table_id])
+        "SqlaTable",
+        backref=backref("metrics", cascade="all, delete-orphan"),
+        foreign_keys=[table_id],
+    )
     expression = Column(Text)
 
     export_fields = (
-        'metric_name', 'verbose_name', 'metric_type', 'table_id', 'expression',
-        'description', 'is_restricted', 'd3format', 'warning_text')
-    update_from_object_fields = list([
-        s for s in export_fields if s not in ('table_id', )])
-    export_parent = 'table'
+        "metric_name",
+        "verbose_name",
+        "metric_type",
+        "table_id",
+        "expression",
+        "description",
+        "is_restricted",
+        "d3format",
+        "warning_text",
+    )
+    update_from_object_fields = list(
+        [s for s in export_fields if s not in ("table_id",)]
+    )
+    export_parent = "table"
 
     def get_sqla_col(self, label=None):
         db_engine_spec = self.table.database.db_engine_spec
-        label = db_engine_spec.make_label_compatible(label if label else self.metric_name)
+        label = db_engine_spec.make_label_compatible(
+            label if label else self.metric_name
+        )
         return literal_column(self.expression).label(label)
 
     @property
     def perm(self):
         return (
-            '{parent_name}.[{obj.metric_name}](id:{obj.id})'
-        ).format(obj=self,
-                 parent_name=self.table.full_name) if self.table else None
+            ("{parent_name}.[{obj.metric_name}](id:{obj.id})").format(
+                obj=self, parent_name=self.table.full_name
+            )
+            if self.table
+            else None
+        )
 
     def get_perm(self):
         return self.perm
@@ -244,9 +295,15 @@ class SqlMetric(Model, BaseMetric):
     @classmethod
     def import_obj(cls, i_metric):
         def lookup_obj(lookup_metric):
-            return db.session.query(SqlMetric).filter(
-                SqlMetric.table_id == lookup_metric.table_id,
-                SqlMetric.metric_name == lookup_metric.metric_name).first()
+            return (
+                db.session.query(SqlMetric)
+                .filter(
+                    SqlMetric.table_id == lookup_metric.table_id,
+                    SqlMetric.metric_name == lookup_metric.metric_name,
+                )
+                .first()
+            )
+
         return import_datasource.import_simple_obj(db.session, i_metric, lookup_obj)
 
 
@@ -254,52 +311,62 @@ class SqlaTable(Model, BaseDatasource):
 
     """An ORM object for SqlAlchemy table references"""
 
-    type = 'table'
-    query_language = 'sql'
+    type = "table"
+    query_language = "sql"
     metric_class = SqlMetric
     column_class = TableColumn
 
-    __tablename__ = 'tables'
-    __table_args__ = (UniqueConstraint('database_id', 'table_name'),)
+    __tablename__ = "tables"
+    __table_args__ = (UniqueConstraint("database_id", "table_name"),)
 
     table_name = Column(String(250))
     main_dttm_col = Column(String(250))
-    database_id = Column(Integer, ForeignKey('dbs.id'), nullable=False)
+    database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)
     fetch_values_predicate = Column(String(1000))
-    user_id = Column(Integer, ForeignKey('ab_user.id'))
+    user_id = Column(Integer, ForeignKey("ab_user.id"))
     owner = relationship(
-        security_manager.user_model,
-        backref='tables',
-        foreign_keys=[user_id])
+        security_manager.user_model, backref="tables", foreign_keys=[user_id]
+    )
     database = relationship(
-        'Database',
-        backref=backref('tables', cascade='all, delete-orphan'),
-        foreign_keys=[database_id])
+        "Database",
+        backref=backref("tables", cascade="all, delete-orphan"),
+        foreign_keys=[database_id],
+    )
     schema = Column(String(255))
     sql = Column(Text)
     is_sqllab_view = Column(Boolean, default=False)
     template_params = Column(Text)
 
-    baselink = 'tablemodelview'
+    baselink = "tablemodelview"
 
     export_fields = (
-        'table_name', 'main_dttm_col', 'description', 'default_endpoint',
-        'database_id', 'offset', 'cache_timeout', 'schema',
-        'sql', 'params', 'template_params', 'filter_select_enabled',
-        'fetch_values_predicate',
+        "table_name",
+        "main_dttm_col",
+        "description",
+        "default_endpoint",
+        "database_id",
+        "offset",
+        "cache_timeout",
+        "schema",
+        "sql",
+        "params",
+        "template_params",
+        "filter_select_enabled",
+        "fetch_values_predicate",
     )
     update_from_object_fields = [
-        f for f in export_fields if f not in ('table_name', 'database_id')]
-    export_parent = 'database'
-    export_children = ['metrics', 'columns']
+        f for f in export_fields if f not in ("table_name", "database_id")
+    ]
+    export_parent = "database"
+    export_children = ["metrics", "columns"]
 
     sqla_aggregations = {
-        'COUNT_DISTINCT': lambda column_name: sa.func.COUNT(sa.distinct(column_name)),
-        'COUNT': sa.func.COUNT,
-        'SUM': sa.func.SUM,
-        'AVG': sa.func.AVG,
-        'MIN': sa.func.MIN,
-        'MAX': sa.func.MAX,
+        "COUNT_DISTINCT": lambda column_name: sa.func.COUNT(sa.distinct(column_name)),
+        "COUNT": sa.func.COUNT,
+        "SUM": sa.func.SUM,
+        "AVG": sa.func.AVG,
+        "MIN": sa.func.MIN,
+        "MAX": sa.func.MAX,
     }
 
     def __repr__(self):
@@ -333,20 +400,19 @@ class SqlaTable(Model, BaseDatasource):
         return security_manager.get_schema_perm(self.database, self.schema)
 
     def get_perm(self):
-        return (
-            '[{obj.database}].[{obj.table_name}]'
-            '(id:{obj.id})').format(obj=self)
+        return ("[{obj.database}].[{obj.table_name}]" "(id:{obj.id})").format(obj=self)
 
     @property
     def name(self):
         if not self.schema:
             return self.table_name
-        return '{}.{}'.format(self.schema, self.table_name)
+        return "{}.{}".format(self.schema, self.table_name)
 
     @property
     def full_name(self):
         return utils.get_datasource_full_name(
-            self.database, self.table_name, schema=self.schema)
+            self.database, self.table_name, schema=self.schema
+        )
 
     @property
     def dttm_cols(self):
@@ -369,31 +435,30 @@ class SqlaTable(Model, BaseDatasource):
     def html(self):
         t = ((c.column_name, c.type) for c in self.columns)
         df = pd.DataFrame(t)
-        df.columns = ['field', 'type']
+        df.columns = ["field", "type"]
         return df.to_html(
             index=False,
-            classes=(
-                'dataframe table table-striped table-bordered '
-                'table-condensed'))
+            classes=("dataframe table table-striped table-bordered " "table-condensed"),
+        )
 
     @property
     def sql_url(self):
-        return self.database.sql_url + '?table_name=' + str(self.table_name)
+        return self.database.sql_url + "?table_name=" + str(self.table_name)
 
     def external_metadata(self):
         cols = self.database.get_columns(self.table_name, schema=self.schema)
         for col in cols:
             try:
-                col['type'] = str(col['type'])
+                col["type"] = str(col["type"])
             except CompileError:
-                col['type'] = 'UNKNOWN'
+                col["type"] = "UNKNOWN"
         return cols
 
     @property
     def time_column_grains(self):
         return {
-            'time_columns': self.dttm_cols,
-            'time_grains': [grain.name for grain in self.database.grains()],
+            "time_columns": self.dttm_cols,
+            "time_grains": [grain.name for grain in self.database.grains()],
         }
 
     @property
@@ -401,7 +466,8 @@ class SqlaTable(Model, BaseDatasource):
         # show_cols and latest_partition set to false to avoid
         # the expensive cost of inspecting the DB
         return self.database.select_star(
-            self.name, show_cols=False, latest_partition=False)
+            self.name, show_cols=False, latest_partition=False
+        )
 
     def get_col(self, col_name):
         columns = self.columns
@@ -412,14 +478,14 @@ class SqlaTable(Model, BaseDatasource):
     @property
     def data(self):
         d = super(SqlaTable, self).data
-        if self.type == 'table':
+        if self.type == "table":
             grains = self.database.grains() or []
             if grains:
                 grains = [(g.duration, g.name) for g in grains]
-            d['granularity_sqla'] = utils.choicify(self.dttm_cols)
-            d['time_grain_sqla'] = grains
-            d['main_dttm_col'] = self.main_dttm_col
-            d['fetch_values_predicate'] = self.fetch_values_predicate
+            d["granularity_sqla"] = utils.choicify(self.dttm_cols)
+            d["time_grain_sqla"] = grains
+            d["main_dttm_col"] = self.main_dttm_col
+            d["fetch_values_predicate"] = self.fetch_values_predicate
         return d
 
     def values_for_column(self, column_name, limit=10000):
@@ -443,9 +509,7 @@ class SqlaTable(Model, BaseDatasource):
             qry = qry.where(tp.process_template(self.fetch_values_predicate))
 
         engine = self.database.get_sqla_engine()
-        sql = '{}'.format(
-            qry.compile(engine, compile_kwargs={'literal_binds': True}),
-        )
+        sql = "{}".format(qry.compile(engine, compile_kwargs={"literal_binds": True}))
         sql = self.mutate_query_from_config(sql)
 
         df = pd.read_sql_query(sql=sql, con=engine)
@@ -455,23 +519,22 @@ class SqlaTable(Model, BaseDatasource):
         """Apply config's SQL_QUERY_MUTATOR
 
         Typically adds comments to the query with context"""
-        SQL_QUERY_MUTATOR = config.get('SQL_QUERY_MUTATOR')
+        SQL_QUERY_MUTATOR = config.get("SQL_QUERY_MUTATOR")
         if SQL_QUERY_MUTATOR:
             username = utils.get_username()
             sql = SQL_QUERY_MUTATOR(sql, username, security_manager, self.database)
         return sql
 
     def get_template_processor(self, **kwargs):
-        return get_template_processor(
-            table=self, database=self.database, **kwargs)
+        return get_template_processor(table=self, database=self.database, **kwargs)
 
     def get_query_str(self, query_obj):
         qry = self.get_sqla_query(**query_obj)
         sql = self.database.compile_sqla_query(qry)
         logging.info(sql)
         sql = sqlparse.format(sql, reindent=True)
-        if query_obj['is_prequery']:
-            query_obj['prequeries'].append(sql)
+        if query_obj["is_prequery"]:
+            query_obj["prequeries"].append(sql)
         sql = self.mutate_query_from_config(sql)
         return sql
 
@@ -488,7 +551,7 @@ class SqlaTable(Model, BaseDatasource):
             if template_processor:
                 from_sql = template_processor.process_template(from_sql)
             from_sql = sqlparse.format(from_sql, strip_comments=True)
-            return TextAsFrom(sa.text(from_sql), []).alias('expr_qry')
+            return TextAsFrom(sa.text(from_sql), []).alias("expr_qry")
         return self.get_sqla_table()
 
     def adhoc_metric_to_sqla(self, metric, cols):
@@ -500,56 +563,58 @@ class SqlaTable(Model, BaseDatasource):
         :returns: The metric defined as a sqlalchemy column
         :rtype: sqlalchemy.sql.column
         """
-        expression_type = metric.get('expressionType')
+        expression_type = metric.get("expressionType")
         db_engine_spec = self.database.db_engine_spec
-        label = db_engine_spec.make_label_compatible(metric.get('label'))
+        label = db_engine_spec.make_label_compatible(metric.get("label"))
 
-        if expression_type == utils.ADHOC_METRIC_EXPRESSION_TYPES['SIMPLE']:
-            column_name = metric.get('column').get('column_name')
+        if expression_type == utils.ADHOC_METRIC_EXPRESSION_TYPES["SIMPLE"]:
+            column_name = metric.get("column").get("column_name")
             sqla_column = column(column_name)
             table_column = cols.get(column_name)
 
             if table_column:
                 sqla_column = table_column.get_sqla_col()
 
-            sqla_metric = self.sqla_aggregations[metric.get('aggregate')](sqla_column)
+            sqla_metric = self.sqla_aggregations[metric.get("aggregate")](sqla_column)
             sqla_metric = sqla_metric.label(label)
             return sqla_metric
-        elif expression_type == utils.ADHOC_METRIC_EXPRESSION_TYPES['SQL']:
-            sqla_metric = literal_column(metric.get('sqlExpression'))
+        elif expression_type == utils.ADHOC_METRIC_EXPRESSION_TYPES["SQL"]:
+            sqla_metric = literal_column(metric.get("sqlExpression"))
             sqla_metric = sqla_metric.label(label)
             return sqla_metric
         else:
             return None
 
     def get_sqla_query(  # sqla
-            self,
-            groupby, metrics,
-            granularity,
-            from_dttm, to_dttm,
-            filter=None,  # noqa
-            is_timeseries=True,
-            timeseries_limit=15,
-            timeseries_limit_metric=None,
-            row_limit=None,
-            inner_from_dttm=None,
-            inner_to_dttm=None,
-            orderby=None,
-            extras=None,
-            columns=None,
-            order_desc=True,
-            prequeries=None,
-            is_prequery=False,
-        ):
+        self,
+        groupby,
+        metrics,
+        granularity,
+        from_dttm,
+        to_dttm,
+        filter=None,  # noqa
+        is_timeseries=True,
+        timeseries_limit=15,
+        timeseries_limit_metric=None,
+        row_limit=None,
+        inner_from_dttm=None,
+        inner_to_dttm=None,
+        orderby=None,
+        extras=None,
+        columns=None,
+        order_desc=True,
+        prequeries=None,
+        is_prequery=False,
+    ):
         """Querying any sqla table from this common interface"""
         template_kwargs = {
-            'from_dttm': from_dttm,
-            'groupby': groupby,
-            'metrics': metrics,
-            'row_limit': row_limit,
-            'to_dttm': to_dttm,
-            'filter': filter,
-            'columns': {col.column_name: col for col in self.columns},
+            "from_dttm": from_dttm,
+            "groupby": groupby,
+            "metrics": metrics,
+            "row_limit": row_limit,
+            "to_dttm": to_dttm,
+            "filter": filter,
+            "columns": {col.column_name: col for col in self.columns},
         }
         template_kwargs.update(self.template_params_dict)
         template_processor = self.get_template_processor(**template_kwargs)
@@ -568,11 +633,14 @@ class SqlaTable(Model, BaseDatasource):
         metrics_dict = {m.metric_name: m for m in self.metrics}
 
         if not granularity and is_timeseries:
-            raise Exception(_(
-                'Datetime column not provided as part of the table configuration '
-                'and is required by this type of chart'))
+            raise Exception(
+                _(
+                    "Datetime column not provided as part table configuration "
+                    "and is required by this type of chart"
+                )
+            )
         if not groupby and not metrics and not columns:
-            raise Exception(_('Empty query?'))
+            raise Exception(_("Empty query?"))
         metrics_exprs = []
         for m in metrics:
             if utils.is_adhoc_metric(m):
@@ -584,8 +652,9 @@ class SqlaTable(Model, BaseDatasource):
         if metrics_exprs:
             main_metric_expr = metrics_exprs[0]
         else:
-            main_metric_expr = literal_column('COUNT(*)').label(
-                db_engine_spec.make_label_compatible('count'))
+            main_metric_expr = literal_column("COUNT(*)").label(
+                db_engine_spec.make_label_compatible("count")
+            )
 
         select_exprs = []
         groupby_exprs = []
@@ -597,7 +666,7 @@ class SqlaTable(Model, BaseDatasource):
             for s in groupby:
                 col = cols[s]
                 outer = col.get_sqla_col()
-                inner = col.get_sqla_col(col.column_name + '__')
+                inner = col.get_sqla_col(col.column_name + "__")
 
                 groupby_exprs.append(outer)
                 select_exprs.append(outer)
@@ -610,7 +679,7 @@ class SqlaTable(Model, BaseDatasource):
 
         if granularity:
             dttm_col = cols[granularity]
-            time_grain = extras.get('time_grain_sqla')
+            time_grain = extras.get("time_grain_sqla")
             time_filters = []
 
             if is_timeseries:
@@ -619,11 +688,14 @@ class SqlaTable(Model, BaseDatasource):
                 groupby_exprs += [timestamp]
 
             # Use main dttm column to support index with secondary dttm columns
-            if db_engine_spec.time_secondary_columns and \
-                    self.main_dttm_col in self.dttm_cols and \
-                    self.main_dttm_col != dttm_col.column_name:
-                time_filters.append(cols[self.main_dttm_col].
-                                    get_time_filter(from_dttm, to_dttm))
+            if (
+                db_engine_spec.time_secondary_columns
+                and self.main_dttm_col in self.dttm_cols
+                and self.main_dttm_col != dttm_col.column_name
+            ):
+                time_filters.append(
+                    cols[self.main_dttm_col].get_time_filter(from_dttm, to_dttm)
+                )
             time_filters.append(dttm_col.get_time_filter(from_dttm, to_dttm))
 
         select_exprs += metrics_exprs
@@ -637,55 +709,55 @@ class SqlaTable(Model, BaseDatasource):
         where_clause_and = []
         having_clause_and = []
         for flt in filter:
-            if not all([flt.get(s) for s in ['col', 'op']]):
+            if not all([flt.get(s) for s in ["col", "op"]]):
                 continue
-            col = flt['col']
-            op = flt['op']
+            col = flt["col"]
+            op = flt["op"]
             col_obj = cols.get(col)
             if col_obj:
-                is_list_target = op in ('in', 'not in')
+                is_list_target = op in ("in", "not in")
                 eq = self.filter_values_handler(
-                    flt.get('val'),
+                    flt.get("val"),
                     target_column_is_numeric=col_obj.is_num,
-                    is_list_target=is_list_target)
-                if op in ('in', 'not in'):
+                    is_list_target=is_list_target,
+                )
+                if op in ("in", "not in"):
                     cond = col_obj.get_sqla_col().in_(eq)
-                    if '<NULL>' in eq:
+                    if "<NULL>" in eq:
                         cond = or_(cond, col_obj.get_sqla_col() == None)  # noqa
-                    if op == 'not in':
+                    if op == "not in":
                         cond = ~cond
                     where_clause_and.append(cond)
                 else:
                     if col_obj.is_num:
-                        eq = utils.string_to_num(flt['val'])
-                    if op == '==':
+                        eq = utils.string_to_num(flt["val"])
+                    if op == "==":
                         where_clause_and.append(col_obj.get_sqla_col() == eq)
-                    elif op == '!=':
+                    elif op == "!=":
                         where_clause_and.append(col_obj.get_sqla_col() != eq)
-                    elif op == '>':
+                    elif op == ">":
                         where_clause_and.append(col_obj.get_sqla_col() > eq)
-                    elif op == '<':
+                    elif op == "<":
                         where_clause_and.append(col_obj.get_sqla_col() < eq)
-                    elif op == '>=':
+                    elif op == ">=":
                         where_clause_and.append(col_obj.get_sqla_col() >= eq)
-                    elif op == '<=':
+                    elif op == "<=":
                         where_clause_and.append(col_obj.get_sqla_col() <= eq)
-                    elif op == 'LIKE':
+                    elif op == "LIKE":
                         where_clause_and.append(col_obj.get_sqla_col().like(eq))
-                    elif op == 'IS NULL':
+                    elif op == "IS NULL":
                         where_clause_and.append(col_obj.get_sqla_col() == None)  # noqa
-                    elif op == 'IS NOT NULL':
-                        where_clause_and.append(
-                            col_obj.get_sqla_col() != None)  # noqa
+                    elif op == "IS NOT NULL":
+                        where_clause_and.append(col_obj.get_sqla_col() != None)  # noqa
         if extras:
-            where = extras.get('where')
+            where = extras.get("where")
             if where:
                 where = template_processor.process_template(where)
-                where_clause_and += [sa.text('({})'.format(where))]
-            having = extras.get('having')
+                where_clause_and += [sa.text("({})".format(where))]
+            having = extras.get("having")
             if having:
                 having = template_processor.process_template(having)
-                having_clause_and += [sa.text('({})'.format(having))]
+                having_clause_and += [sa.text("({})".format(having))]
         if granularity:
             qry = qry.where(and_(*(time_filters + where_clause_and)))
         else:
@@ -704,19 +776,17 @@ class SqlaTable(Model, BaseDatasource):
         if row_limit:
             qry = qry.limit(row_limit)
 
-        if is_timeseries and \
-                timeseries_limit and groupby and not time_groupby_inline:
+        if is_timeseries and timeseries_limit and groupby and not time_groupby_inline:
             if self.database.db_engine_spec.inner_joins:
                 # some sql dialects require for order by expressions
                 # to also be in the select clause -- others, e.g. vertica,
                 # require a unique inner alias
-                inner_main_metric_expr = main_metric_expr.label('mme_inner__')
+                inner_main_metric_expr = main_metric_expr.label("mme_inner__")
                 inner_select_exprs += [inner_main_metric_expr]
                 subq = select(inner_select_exprs)
                 subq = subq.select_from(tbl)
                 inner_time_filter = dttm_col.get_time_filter(
-                    inner_from_dttm or from_dttm,
-                    inner_to_dttm or to_dttm,
+                    inner_from_dttm or from_dttm, inner_to_dttm or to_dttm
                 )
                 subq = subq.where(and_(*(where_clause_and + [inner_time_filter])))
                 subq = subq.group_by(*inner_groupby_exprs)
@@ -727,7 +797,7 @@ class SqlaTable(Model, BaseDatasource):
                         ob = self.adhoc_metric_to_sqla(timeseries_limit_metric, cols)
                     elif timeseries_limit_metric in metrics_dict:
                         timeseries_limit_metric = metrics_dict.get(
-                            timeseries_limit_metric,
+                            timeseries_limit_metric
                         )
                         ob = timeseries_limit_metric.get_sqla_col()
                     else:
@@ -738,33 +808,31 @@ class SqlaTable(Model, BaseDatasource):
 
                 on_clause = []
                 for i, gb in enumerate(groupby):
-                    on_clause.append(
-                        groupby_exprs[i] == column(gb + '__'))
+                    on_clause.append(groupby_exprs[i] == column(gb + "__"))
 
                 tbl = tbl.join(subq.alias(), and_(*on_clause))
             else:
                 # run subquery to get top groups
                 subquery_obj = {
-                    'prequeries': prequeries,
-                    'is_prequery': True,
-                    'is_timeseries': False,
-                    'row_limit': timeseries_limit,
-                    'groupby': groupby,
-                    'metrics': metrics,
-                    'granularity': granularity,
-                    'from_dttm': inner_from_dttm or from_dttm,
-                    'to_dttm': inner_to_dttm or to_dttm,
-                    'filter': filter,
-                    'orderby': orderby,
-                    'extras': extras,
-                    'columns': columns,
-                    'order_desc': True,
+                    "prequeries": prequeries,
+                    "is_prequery": True,
+                    "is_timeseries": False,
+                    "row_limit": timeseries_limit,
+                    "groupby": groupby,
+                    "metrics": metrics,
+                    "granularity": granularity,
+                    "from_dttm": inner_from_dttm or from_dttm,
+                    "to_dttm": inner_to_dttm or to_dttm,
+                    "filter": filter,
+                    "orderby": orderby,
+                    "extras": extras,
+                    "columns": columns,
+                    "order_desc": True,
                 }
                 result = self.query(subquery_obj)
                 cols = {col.column_name: col for col in self.columns}
                 dimensions = [
-                    c for c in result.df.columns
-                    if c not in metrics and c in cols
+                    c for c in result.df.columns if c not in metrics and c in cols
                 ]
                 top_groups = self._get_top_groups(result.df, dimensions)
                 qry = qry.where(top_groups)
@@ -794,21 +862,21 @@ class SqlaTable(Model, BaseDatasource):
         except Exception as e:
             status = utils.QueryStatus.FAILED
             logging.exception(e)
-            error_message = (
-                self.database.db_engine_spec.extract_error_message(e))
+            error_message = self.database.db_engine_spec.extract_error_message(e)
 
         # if this is a main query with prequeries, combine them together
-        if not query_obj['is_prequery']:
-            query_obj['prequeries'].append(sql)
-            sql = ';\n\n'.join(query_obj['prequeries'])
-        sql += ';'
+        if not query_obj["is_prequery"]:
+            query_obj["prequeries"].append(sql)
+            sql = ";\n\n".join(query_obj["prequeries"])
+        sql += ";"
 
         return QueryResult(
             status=status,
             df=df,
             duration=datetime.now() - qry_start_dttm,
             query=sql,
-            error_message=error_message)
+            error_message=error_message,
+        )
 
     def get_sqla_table_object(self):
         return self.database.get_table(self.table_name, schema=self.schema)
@@ -819,9 +887,12 @@ class SqlaTable(Model, BaseDatasource):
             table = self.get_sqla_table_object()
         except Exception as e:
             logging.exception(e)
-            raise Exception(_(
-                "Table [{}] doesn't seem to exist in the specified database, "
-                "couldn't fetch column information").format(self.table_name))
+            raise Exception(
+                _(
+                    "Table [{}] doesn't seem to exist in the specified database, "
+                    "couldn't fetch column information"
+                ).format(self.table_name)
+            )
 
         M = SqlMetric  # noqa
         metrics = []
@@ -830,8 +901,8 @@ class SqlaTable(Model, BaseDatasource):
         dbcols = (
             db.session.query(TableColumn)
             .filter(TableColumn.table == self)
-            .filter(or_(TableColumn.column_name == col.name
-                        for col in table.columns)))
+            .filter(or_(TableColumn.column_name == col.name for col in table.columns))
+        )
         dbcols = {dbcol.column_name: dbcol for dbcol in dbcols}
         db_engine_spec = self.database.db_engine_spec
 
@@ -839,9 +910,8 @@ class SqlaTable(Model, BaseDatasource):
             try:
                 datatype = col.type.compile(dialect=db_dialect).upper()
             except Exception as e:
-                datatype = 'UNKNOWN'
-                logging.error(
-                    'Unrecognized data type in {}.{}'.format(table, col.name))
+                datatype = "UNKNOWN"
+                logging.error("Unrecognized data type in {}.{}".format(table, col.name))
                 logging.exception(e)
             dbcol = dbcols.get(col.name, None)
             if not dbcol:
@@ -858,17 +928,20 @@ class SqlaTable(Model, BaseDatasource):
                 any_date_col = col.name
             metrics += dbcol.get_metrics().values()
 
-        metrics.append(M(
-            metric_name='count',
-            verbose_name='COUNT(*)',
-            metric_type='count',
-            expression='COUNT(*)',
-        ))
+        metrics.append(
+            M(
+                metric_name="count",
+                verbose_name="COUNT(*)",
+                metric_type="count",
+                expression="COUNT(*)",
+            )
+        )
         if not self.main_dttm_col:
             self.main_dttm_col = any_date_col
         for metric in metrics:
             metric.metric_name = db_engine_spec.mutate_expression_label(
-                metric.metric_name)
+                metric.metric_name
+            )
         self.add_missing_metrics(metrics)
         db.session.merge(self)
         db.session.commit()
@@ -881,23 +954,32 @@ class SqlaTable(Model, BaseDatasource):
          This function can be used to import/export dashboards between multiple
          superset instances. Audit metadata isn't copied over.
         """
+
         def lookup_sqlatable(table):
-            return db.session.query(SqlaTable).join(Database).filter(
-                SqlaTable.table_name == table.table_name,
-                SqlaTable.schema == table.schema,
-                Database.id == table.database_id,
-            ).first()
+            return (
+                db.session.query(SqlaTable)
+                .join(Database)
+                .filter(
+                    SqlaTable.table_name == table.table_name,
+                    SqlaTable.schema == table.schema,
+                    Database.id == table.database_id,
+                )
+                .first()
+            )
 
         def lookup_database(table):
-            return db.session.query(Database).filter_by(
-                database_name=table.params_dict['database_name']).one()
+            return (
+                db.session.query(Database)
+                .filter_by(database_name=table.params_dict["database_name"])
+                .one()
+            )
+
         return import_datasource.import_datasource(
-            db.session, i_datasource, lookup_database, lookup_sqlatable,
-            import_time)
+            db.session, i_datasource, lookup_database, lookup_sqlatable, import_time
+        )
 
     @classmethod
-    def query_datasources_by_name(
-            cls, session, database, datasource_name, schema=None):
+    def query_datasources_by_name(cls, session, database, datasource_name, schema=None):
         query = (
             session.query(cls)
             .filter_by(database_id=database.id)
@@ -912,5 +994,5 @@ class SqlaTable(Model, BaseDatasource):
         return qry.filter_by(is_sqllab_view=False)
 
 
-sa.event.listen(SqlaTable, 'after_insert', security_manager.set_perm)
-sa.event.listen(SqlaTable, 'after_update', security_manager.set_perm)
+sa.event.listen(SqlaTable, "after_insert", security_manager.set_perm)
+sa.event.listen(SqlaTable, "after_update", security_manager.set_perm)
diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py
index c085958..070a68e 100644
--- a/superset/connectors/sqla/views.py
+++ b/superset/connectors/sqla/views.py
@@ -13,8 +13,12 @@ from superset import appbuilder, db, security_manager
 from superset.connectors.base.views import DatasourceModelView
 from superset.utils import core as utils
 from superset.views.base import (
-    DatasourceFilter, DeleteMixin, get_datasource_exist_error_msg,
-    ListWidgetWithCheckboxes, SupersetModelView, YamlExportMixin,
+    DatasourceFilter,
+    DeleteMixin,
+    get_datasource_exist_error_msg,
+    ListWidgetWithCheckboxes,
+    SupersetModelView,
+    YamlExportMixin,
 )
 from . import models
 
@@ -22,70 +26,94 @@ from . import models
 class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     datamodel = SQLAInterface(models.TableColumn)
 
-    list_title = _('List Columns')
-    show_title = _('Show Column')
-    add_title = _('Add Column')
-    edit_title = _('Edit Column')
+    list_title = _("List Columns")
+    show_title = _("Show Column")
+    add_title = _("Add Column")
+    edit_title = _("Edit Column")
 
     can_delete = False
     list_widget = ListWidgetWithCheckboxes
     edit_columns = [
-        'column_name', 'verbose_name', 'description',
-        'type', 'groupby', 'filterable',
-        'table', 'expression',
-        'is_dttm', 'python_date_format', 'database_expression']
+        "column_name",
+        "verbose_name",
+        "description",
+        "type",
+        "groupby",
+        "filterable",
+        "table",
+        "expression",
+        "is_dttm",
+        "python_date_format",
+        "database_expression",
+    ]
     add_columns = edit_columns
     list_columns = [
-        'column_name', 'verbose_name', 'type', 'groupby', 'filterable',
-        'is_dttm']
+        "column_name",
+        "verbose_name",
+        "type",
+        "groupby",
+        "filterable",
+        "is_dttm",
+    ]
     page_size = 500
     description_columns = {
-        'is_dttm': _(
-            'Whether to make this column available as a '
-            '[Time Granularity] option, column has to be DATETIME or '
-            'DATETIME-like'),
-        'filterable': _(
-            'Whether this column is exposed in the `Filters` section '
-            'of the explore view.'),
-        'type': _(
-            'The data type that was inferred by the database. '
-            'It may be necessary to input a type manually for '
-            'expression-defined columns in some cases. In most cases '
-            'users should not need to alter this.'),
-        'expression': utils.markdown(
-            'a valid, *non-aggregating* SQL expression as supported by the '
-            'underlying backend. Example: `substr(name, 1, 1)`', True),
-        'python_date_format': utils.markdown(Markup(
-            'The pattern of timestamp format, use '
-            '<a href="https://docs.python.org/2/library/'
-            'datetime.html#strftime-strptime-behavior">'
-            'python datetime string pattern</a> '
-            'expression. If time is stored in epoch '
-            'format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` '
-            'below empty if timestamp is stored in '
-            'String or Integer(epoch) type'), True),
-        'database_expression': utils.markdown(
-            'The database expression to cast internal datetime '
-            'constants to database date/timestamp type according to the DBAPI. '
-            'The expression should follow the pattern of '
-            '%Y-%m-%d %H:%M:%S, based on different DBAPI. '
-            'The string should be a python string formatter \n'
+        "is_dttm": _(
+            "Whether to make this column available as a "
+            "[Time Granularity] option, column has to be DATETIME or "
+            "DATETIME-like"
+        ),
+        "filterable": _(
+            "Whether this column is exposed in the `Filters` section "
+            "of the explore view."
+        ),
+        "type": _(
+            "The data type that was inferred by the database. "
+            "It may be necessary to input a type manually for "
+            "expression-defined columns in some cases. In most case "
+            "users should not need to alter this."
+        ),
+        "expression": utils.markdown(
+            "a valid, *non-aggregating* SQL expression as supported by the "
+            "underlying backend. Example: `substr(name, 1, 1)`",
+            True,
+        ),
+        "python_date_format": utils.markdown(
+            Markup(
+                "The pattern of timestamp format, use "
+                '<a href="https://docs.python.org/2/library/'
+                'datetime.html#strftime-strptime-behavior">'
+                "python datetime string pattern</a> "
+                "expression. If time is stored in epoch "
+                "format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` "
+                "below empty if timestamp is stored in "
+                "String or Integer(epoch) type"
+            ),
+            True,
+        ),
+        "database_expression": utils.markdown(
+            "The database expression to cast internal datetime "
+            "constants to database date/timestamp type according to the DBAPI. "
+            "The expression should follow the pattern of "
+            "%Y-%m-%d %H:%M:%S, based on different DBAPI. "
+            "The string should be a python string formatter \n"
             "`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle "
-            'Superset uses default expression based on DB URI if this '
-            'field is blank.', True),
+            "Superset uses default expression based on DB URI if this "
+            "field is blank.",
+            True,
+        ),
     }
     label_columns = {
-        'column_name': _('Column'),
-        'verbose_name': _('Verbose Name'),
-        'description': _('Description'),
-        'groupby': _('Groupable'),
-        'filterable': _('Filterable'),
-        'table': _('Table'),
-        'expression': _('Expression'),
-        'is_dttm': _('Is temporal'),
-        'python_date_format': _('Datetime Format'),
-        'database_expression': _('Database Expression'),
-        'type': _('Type'),
+        "column_name": _("Column"),
+        "verbose_name": _("Verbose Name"),
+        "description": _("Description"),
+        "groupby": _("Groupable"),
+        "filterable": _("Filterable"),
+        "table": _("Table"),
+        "expression": _("Expression"),
+        "is_dttm": _("Is temporal"),
+        "python_date_format": _("Datetime Format"),
+        "database_expression": _("Database Expression"),
+        "type": _("Type"),
     }
 
 
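Note on the `python_date_format` description above: `epoch_s` and `epoch_ms` mark integer epoch columns, while any other value is treated as a strftime-style pattern. A minimal standalone sketch of that interpretation (illustrative only, not part of this change; the helper name and the use of pandas here are assumptions):

    import pandas as pd

    def parse_dttm(series, python_date_format):
        # "epoch_s"/"epoch_ms" flag integer epoch columns; any other value
        # is used as a strftime-style pattern, per the description above.
        if python_date_format == "epoch_s":
            return pd.to_datetime(series, unit="s")
        if python_date_format == "epoch_ms":
            return pd.to_datetime(series, unit="ms")
        return pd.to_datetime(series, format=python_date_format)

    parse_dttm(pd.Series([1544400000]), "epoch_s")   # 2018-12-10 00:00:00
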
@@ -95,52 +123,65 @@ appbuilder.add_view_no_menu(TableColumnInlineView)
 class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     datamodel = SQLAInterface(models.SqlMetric)
 
-    list_title = _('List Metrics')
-    show_title = _('Show Metric')
-    add_title = _('Add Metric')
-    edit_title = _('Edit Metric')
+    list_title = _("List Metrics")
+    show_title = _("Show Metric")
+    add_title = _("Add Metric")
+    edit_title = _("Edit Metric")
 
-    list_columns = ['metric_name', 'verbose_name', 'metric_type']
+    list_columns = ["metric_name", "verbose_name", "metric_type"]
     edit_columns = [
-        'metric_name', 'description', 'verbose_name', 'metric_type',
-        'expression', 'table', 'd3format', 'is_restricted', 'warning_text']
+        "metric_name",
+        "description",
+        "verbose_name",
+        "metric_type",
+        "expression",
+        "table",
+        "d3format",
+        "is_restricted",
+        "warning_text",
+    ]
     description_columns = {
-        'expression': utils.markdown(
-            'a valid, *aggregating* SQL expression as supported by the '
-            'underlying backend. Example: `count(DISTINCT userid)`', True),
-        'is_restricted': _('Whether access to this metric is restricted '
-                           'to certain roles. Only roles with the permission '
-                           "'metric access on XXX (the name of this metric)' "
-                           'are allowed to access this metric'),
-        'd3format': utils.markdown(
-            'd3 formatting string as defined [here]'
-            '(https://github.com/d3/d3-format/blob/master/README.md#format). '
-            'For instance, this default formatting applies in the Table '
-            'visualization and allow for different metric to use different '
-            'formats', True,
+        "expression": utils.markdown(
+            "a valid, *aggregating* SQL expression as supported by the "
+            "underlying backend. Example: `count(DISTINCT userid)`",
+            True,
+        ),
+        "is_restricted": _(
+            "Whether access to this metric is restricted "
+            "to certain roles. Only roles with the permission "
+            "'metric access on XXX (the name of this metric)' "
+            "are allowed to access this metric"
+        ),
+        "d3format": utils.markdown(
+            "d3 formatting string as defined [here]"
+            "(https://github.com/d3/d3-format/blob/master/README.md#format). "
+            "For instance, this default formatting applies in the Table "
+            "visualization and allow for different metric to use different "
+            "formats",
+            True,
         ),
     }
     add_columns = edit_columns
     page_size = 500
     label_columns = {
-        'metric_name': _('Metric'),
-        'description': _('Description'),
-        'verbose_name': _('Verbose Name'),
-        'metric_type': _('Type'),
-        'expression': _('SQL Expression'),
-        'table': _('Table'),
-        'd3format': _('D3 Format'),
-        'is_restricted': _('Is Restricted'),
-        'warning_text': _('Warning Message'),
+        "metric_name": _("Metric"),
+        "description": _("Description"),
+        "verbose_name": _("Verbose Name"),
+        "metric_type": _("Type"),
+        "expression": _("SQL Expression"),
+        "table": _("Table"),
+        "d3format": _("D3 Format"),
+        "is_restricted": _("Is Restricted"),
+        "warning_text": _("Warning Message"),
     }
 
     def post_add(self, metric):
         if metric.is_restricted:
-            security_manager.merge_perm('metric_access', metric.get_perm())
+            security_manager.merge_perm("metric_access", metric.get_perm())
 
     def post_update(self, metric):
         if metric.is_restricted:
-            security_manager.merge_perm('metric_access', metric.get_perm())
+            security_manager.merge_perm("metric_access", metric.get_perm())
 
 
 appbuilder.add_view_no_menu(SqlMetricInlineView)
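
As a concrete illustration of the `expression` and `d3format` fields described above, a metric definition might look like the sketch below; whether `SqlMetric` accepts these exact keyword arguments is an assumption based on the columns listed in this view:

    from superset.connectors.sqla.models import SqlMetric

    distinct_users = SqlMetric(
        metric_name="count_distinct_userid",
        verbose_name="Distinct Users",
        metric_type="count_distinct",
        # a valid *aggregating* SQL expression, as the description requires
        expression="COUNT(DISTINCT userid)",
        # d3 format string: thousands separators, no decimals
        d3format=",.0f",
    )
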
@@ -149,96 +190,106 @@ appbuilder.add_view_no_menu(SqlMetricInlineView)
 class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
     datamodel = SQLAInterface(models.SqlaTable)
 
-    list_title = _('List Tables')
-    show_title = _('Show Table')
-    add_title = _('Import a table definition')
-    edit_title = _('Edit Table')
+    list_title = _("List Tables")
+    show_title = _("Show Table")
+    add_title = _("Import a table definition")
+    edit_title = _("Edit Table")
 
-    list_columns = [
-        'link', 'database_name',
-        'changed_by_', 'modified']
-    order_columns = ['modified']
-    add_columns = ['database', 'schema', 'table_name']
+    list_columns = ["link", "database_name", "changed_by_", "modified"]
+    order_columns = ["modified"]
+    add_columns = ["database", "schema", "table_name"]
     edit_columns = [
-        'table_name', 'sql', 'filter_select_enabled',
-        'fetch_values_predicate', 'database', 'schema',
-        'description', 'owner',
-        'main_dttm_col', 'default_endpoint', 'offset', 'cache_timeout',
-        'is_sqllab_view', 'template_params',
+        "table_name",
+        "sql",
+        "filter_select_enabled",
+        "fetch_values_predicate",
+        "database",
+        "schema",
+        "description",
+        "owner",
+        "main_dttm_col",
+        "default_endpoint",
+        "offset",
+        "cache_timeout",
+        "is_sqllab_view",
+        "template_params",
     ]
-    base_filters = [['id', DatasourceFilter, lambda: []]]
-    show_columns = edit_columns + ['perm', 'slices']
+    base_filters = [["id", DatasourceFilter, lambda: []]]
+    show_columns = edit_columns + ["perm", "slices"]
     related_views = [TableColumnInlineView, SqlMetricInlineView]
-    base_order = ('changed_on', 'desc')
-    search_columns = (
-        'database', 'schema', 'table_name', 'owner', 'is_sqllab_view',
-    )
+    base_order = ("changed_on", "desc")
+    search_columns = ("database", "schema", "table_name", "owner", "is_sqllab_view")
     description_columns = {
-        'slices': _(
-            'The list of charts associated with this table. By '
-            'altering this datasource, you may change how these associated '
-            'charts behave. '
-            'Also note that charts need to point to a datasource, so '
-            'this form will fail at saving if removing charts from a '
-            'datasource. If you want to change the datasource for a chart, '
-            "overwrite the chart from the 'explore view'"),
-        'offset': _('Timezone offset (in hours) for this datasource'),
-        'table_name': _(
-            'Name of the table that exists in the source database'),
-        'schema': _(
-            'Schema, as used only in some databases like Postgres, Redshift '
-            'and DB2'),
-        'description': Markup(
+        "slices": _(
+            "The list of charts associated with this table. By "
+            "altering this datasource, you may change how these associated "
+            "charts behave. "
+            "Also note that charts need to point to a datasource, so "
+            "this form will fail at saving if removing charts from a "
+            "datasource. If you want to change the datasource for a chart, "
+            "overwrite the chart from the 'explore view'"
+        ),
+        "offset": _("Timezone offset (in hours) for this datasource"),
+        "table_name": _("Name of the table that exists in the source database"),
+        "schema": _(
+            "Schema, as used only in some databases like Postgres, Redshift " "and DB2"
+        ),
+        "description": Markup(
             'Supports <a href="https://daringfireball.net/projects/markdown/">'
-            'markdown</a>'),
-        'sql': _(
-            'This fields acts a Superset view, meaning that Superset will '
-            'run a query against this string as a subquery.',
+            "markdown</a>"
         ),
-        'fetch_values_predicate': _(
-            'Predicate applied when fetching distinct value to '
-            'populate the filter control component. Supports '
-            'jinja template syntax. Applies only when '
-            '`Enable Filter Select` is on.',
+        "sql": _(
+            "This fields acts a Superset view, meaning that Superset will "
+            "run a query against this string as a subquery."
         ),
-        'default_endpoint': _(
-            'Redirects to this endpoint when clicking on the table '
-            'from the table list'),
-        'filter_select_enabled': _(
+        "fetch_values_predicate": _(
+            "Predicate applied when fetching distinct value to "
+            "populate the filter control component. Supports "
+            "jinja template syntax. Applies only when "
+            "`Enable Filter Select` is on."
+        ),
+        "default_endpoint": _(
+            "Redirects to this endpoint when clicking on the table "
+            "from the table list"
+        ),
+        "filter_select_enabled": _(
             "Whether to populate the filter's dropdown in the explore "
             "view's filter section with a list of distinct values fetched "
-            'from the backend on the fly'),
-        'is_sqllab_view': _(
-            "Whether the table was generated by the 'Visualize' flow "
-            'in SQL Lab'),
-        'template_params': _(
-            'A set of parameters that become available in the query using '
-            'Jinja templating syntax'),
-        'cache_timeout': _(
-            'Duration (in seconds) of the caching timeout for this table. '
-            'A timeout of 0 indicates that the cache never expires. '
-            'Note this defaults to the database timeout if undefined.'),
+            "from the backend on the fly"
+        ),
+        "is_sqllab_view": _(
+            "Whether the table was generated by the 'Visualize' flow " "in SQL Lab"
+        ),
+        "template_params": _(
+            "A set of parameters that become available in the query using "
+            "Jinja templating syntax"
+        ),
+        "cache_timeout": _(
+            "Duration (in seconds) of the caching timeout for this table. "
+            "A timeout of 0 indicates that the cache never expires. "
+            "Note this defaults to the database timeout if undefined."
+        ),
     }
     label_columns = {
-        'slices': _('Associated Charts'),
-        'link': _('Table'),
-        'changed_by_': _('Changed By'),
-        'database': _('Database'),
-        'database_name': _('Database'),
-        'changed_on_': _('Last Changed'),
-        'filter_select_enabled': _('Enable Filter Select'),
-        'schema': _('Schema'),
-        'default_endpoint': _('Default Endpoint'),
-        'offset': _('Offset'),
-        'cache_timeout': _('Cache Timeout'),
-        'table_name': _('Table Name'),
-        'fetch_values_predicate': _('Fetch Values Predicate'),
-        'owner': _('Owner'),
-        'main_dttm_col': _('Main Datetime Column'),
-        'description': _('Description'),
-        'is_sqllab_view': _('SQL Lab View'),
-        'template_params': _('Template parameters'),
-        'modified': _('Modified'),
+        "slices": _("Associated Charts"),
+        "link": _("Table"),
+        "changed_by_": _("Changed By"),
+        "database": _("Database"),
+        "database_name": _("Database"),
+        "changed_on_": _("Last Changed"),
+        "filter_select_enabled": _("Enable Filter Select"),
+        "schema": _("Schema"),
+        "default_endpoint": _("Default Endpoint"),
+        "offset": _("Offset"),
+        "cache_timeout": _("Cache Timeout"),
+        "table_name": _("Table Name"),
+        "fetch_values_predicate": _("Fetch Values Predicate"),
+        "owner": _("Owner"),
+        "main_dttm_col": _("Main Datetime Column"),
+        "description": _("Description"),
+        "is_sqllab_view": _("SQL Lab View"),
+        "template_params": _("Template parameters"),
+        "modified": _("Modified"),
     }
 
     def pre_add(self, table):
@@ -246,33 +297,40 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
             table_query = db.session.query(models.SqlaTable).filter(
                 models.SqlaTable.table_name == table.table_name,
                 models.SqlaTable.schema == table.schema,
-                models.SqlaTable.database_id == table.database.id)
+                models.SqlaTable.database_id == table.database.id,
+            )
             if db.session.query(table_query.exists()).scalar():
-                raise Exception(
-                    get_datasource_exist_error_msg(table.full_name))
+                raise Exception(get_datasource_exist_error_msg(table.full_name))
 
         # Fail before adding if the table can't be found
         try:
             table.get_sqla_table_object()
         except Exception:
-            raise Exception(_(
-                'Table [{}] could not be found, '
-                'please double check your '
-                'database connection, schema, and '
-                'table name').format(table.name))
+            raise Exception(
+                _(
+                    "Table [{}] could not be found, "
+                    "please double check your "
+                    "database connection, schema, and "
+                    "table name"
+                ).format(table.name)
+            )
 
     def post_add(self, table, flash_message=True):
         table.fetch_metadata()
-        security_manager.merge_perm('datasource_access', table.get_perm())
+        security_manager.merge_perm("datasource_access", table.get_perm())
         if table.schema:
-            security_manager.merge_perm('schema_access', table.schema_perm)
+            security_manager.merge_perm("schema_access", table.schema_perm)
 
         if flash_message:
-            flash(_(
-                'The table was created. '
-                'As part of this two phase configuration '
-                'process, you should now click the edit button by '
-                'the new table to configure it.'), 'info')
+            flash(
+                _(
+                    "The table was created. "
+                    "As part of this two phase configuration "
+                    "process, you should now click the edit button by "
+                    "the new table to configure it."
+                ),
+                "info",
+            )
 
     def post_update(self, table):
         self.post_add(table, flash_message=False)
@@ -280,20 +338,18 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
     def _delete(self, pk):
         DeleteMixin._delete(self, pk)
 
-    @expose('/edit/<pk>', methods=['GET', 'POST'])
+    @expose("/edit/<pk>", methods=["GET", "POST"])
     @has_access
     def edit(self, pk):
         """Simple hack to redirect to explore view after saving"""
         resp = super(TableModelView, self).edit(pk)
         if isinstance(resp, basestring):
             return resp
-        return redirect('/superset/explore/table/{}/'.format(pk))
+        return redirect("/superset/explore/table/{}/".format(pk))
 
     @action(
-        'refresh',
-        __('Refresh Metadata'),
-        __('Refresh column metadata'),
-        'fa-refresh')
+        "refresh", __("Refresh Metadata"), __("Refresh column metadata"), "fa-refresh"
+    )
     def refresh(self, tables):
         if not isinstance(tables, list):
             tables = [tables]
@@ -308,26 +364,29 @@ class TableModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
 
         if len(successes) > 0:
             success_msg = _(
-                'Metadata refreshed for the following table(s): %(tables)s',
-                tables=', '.join([t.table_name for t in successes]))
-            flash(success_msg, 'info')
+                "Metadata refreshed for the following table(s): %(tables)s",
+                tables=", ".join([t.table_name for t in successes]),
+            )
+            flash(success_msg, "info")
         if len(failures) > 0:
             failure_msg = _(
-                'Unable to retrieve metadata for the following table(s): %(tables)s',
-                tables=', '.join([t.table_name for t in failures]))
-            flash(failure_msg, 'danger')
+                "Unable to retrieve metadata for the following table(s): %(tables)s",
+                tables=", ".join([t.table_name for t in failures]),
+            )
+            flash(failure_msg, "danger")
 
-        return redirect('/tablemodelview/list/')
+        return redirect("/tablemodelview/list/")
 
 
 appbuilder.add_view_no_menu(TableModelView)
 appbuilder.add_link(
-    'Tables',
-    label=__('Tables'),
-    href='/tablemodelview/list/?_flt_1_is_sqllab_view=y',
-    icon='fa-table',
-    category='Sources',
-    category_label=__('Sources'),
-    category_icon='fa-table')
-
-appbuilder.add_separator('Sources')
+    "Tables",
+    label=__("Tables"),
+    href="/tablemodelview/list/?_flt_1_is_sqllab_view=y",
+    icon="fa-table",
+    category="Sources",
+    category_label=__("Sources"),
+    category_icon="fa-table",
+)
+
+appbuilder.add_separator("Sources")
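
The `template_params` and `fetch_values_predicate` descriptions above both rely on Jinja templating: parameters are made available when the table's SQL is rendered. A minimal standalone sketch of that idea (Superset's actual rendering goes through its own Jinja context; the parameter name below is made up):

    from jinja2 import Template

    template_params = {"state": "CA"}  # hypothetical parameter set
    sql = "SELECT name, num FROM birth_names WHERE state = '{{ state }}'"
    print(Template(sql).render(**template_params))
    # SELECT name, num FROM birth_names WHERE state = 'CA'
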
diff --git a/superset/data/bart_lines.py b/superset/data/bart_lines.py
index 8ae8cf4..9e9c5a0 100644
--- a/superset/data/bart_lines.py
+++ b/superset/data/bart_lines.py
@@ -12,29 +12,30 @@ from .helpers import DATA_FOLDER, TBL
 
 
 def load_bart_lines():
-    tbl_name = 'bart_lines'
-    with gzip.open(os.path.join(DATA_FOLDER, 'bart-lines.json.gz')) as f:
-        df = pd.read_json(f, encoding='latin-1')
-        df['path_json'] = df.path.map(json.dumps)
-        df['polyline'] = df.path.map(polyline.encode)
-        del df['path']
+    tbl_name = "bart_lines"
+    with gzip.open(os.path.join(DATA_FOLDER, "bart-lines.json.gz")) as f:
+        df = pd.read_json(f, encoding="latin-1")
+        df["path_json"] = df.path.map(json.dumps)
+        df["polyline"] = df.path.map(polyline.encode)
+        del df["path"]
     df.to_sql(
         tbl_name,
         db.engine,
-        if_exists='replace',
+        if_exists="replace",
         chunksize=500,
         dtype={
-            'color': String(255),
-            'name': String(255),
-            'polyline': Text,
-            'path_json': Text,
+            "color": String(255),
+            "name": String(255),
+            "polyline": Text,
+            "path_json": Text,
         },
-        index=False)
-    print('Creating table {} reference'.format(tbl_name))
+        index=False,
+    )
+    print("Creating table {} reference".format(tbl_name))
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
     if not tbl:
         tbl = TBL(table_name=tbl_name)
-    tbl.description = 'BART lines'
+    tbl.description = "BART lines"
     tbl.database = get_or_create_main_db()
     db.session.merge(tbl)
     db.session.commit()
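
For a sense of what the two derived columns built in `load_bart_lines` hold, here is a tiny standalone example using the same `json` and `polyline` calls (coordinates made up):

    import json
    import polyline

    path = [(37.7749, -122.4194), (37.8044, -122.2712)]
    print(json.dumps(path))       # [[37.7749, -122.4194], [37.8044, -122.2712]]
    print(polyline.encode(path))  # a short encoded string for the same points
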
diff --git a/superset/data/birth_names.py b/superset/data/birth_names.py
index b697c31..6d6a727 100644
--- a/superset/data/birth_names.py
+++ b/superset/data/birth_names.py
@@ -23,37 +23,40 @@ from .helpers import (
 
 def load_birth_names():
     """Loading birth name dataset from a zip file in the repo"""
-    with gzip.open(os.path.join(DATA_FOLDER, 'birth_names.json.gz')) as f:
+    with gzip.open(os.path.join(DATA_FOLDER, "birth_names.json.gz")) as f:
         pdf = pd.read_json(f)
-    pdf.ds = pd.to_datetime(pdf.ds, unit='ms')
+    pdf.ds = pd.to_datetime(pdf.ds, unit="ms")
     pdf.to_sql(
-        'birth_names',
+        "birth_names",
         db.engine,
-        if_exists='replace',
+        if_exists="replace",
         chunksize=500,
         dtype={
-            'ds': DateTime,
-            'gender': String(16),
-            'state': String(10),
-            'name': String(255),
+            "ds": DateTime,
+            "gender": String(16),
+            "state": String(10),
+            "name": String(255),
         },
-        index=False)
-    print('Done loading table!')
-    print('-' * 80)
+        index=False,
+    )
+    print("Done loading table!")
+    print("-" * 80)
 
-    print('Creating table [birth_names] reference')
-    obj = db.session.query(TBL).filter_by(table_name='birth_names').first()
+    print("Creating table [birth_names] reference")
+    obj = db.session.query(TBL).filter_by(table_name="birth_names").first()
     if not obj:
-        obj = TBL(table_name='birth_names')
-    obj.main_dttm_col = 'ds'
+        obj = TBL(table_name="birth_names")
+    obj.main_dttm_col = "ds"
     obj.database = get_or_create_main_db()
     obj.filter_select_enabled = True
 
-    if not any(col.column_name == 'num_california' for col in obj.columns):
-        obj.columns.append(TableColumn(
-            column_name='num_california',
-            expression="CASE WHEN state = 'CA' THEN num ELSE 0 END",
-        ))
+    if not any(col.column_name == "num_california" for col in obj.columns):
+        obj.columns.append(
+            TableColumn(
+                column_name="num_california",
+                expression="CASE WHEN state = 'CA' THEN num ELSE 0 END",
+            )
+        )
 
     db.session.merge(obj)
     db.session.commit()
@@ -61,147 +64,147 @@ def load_birth_names():
     tbl = obj
 
     defaults = {
-        'compare_lag': '10',
-        'compare_suffix': 'o10Y',
-        'limit': '25',
-        'granularity_sqla': 'ds',
-        'groupby': [],
-        'metric': 'sum__num',
-        'metrics': ['sum__num'],
-        'row_limit': config.get('ROW_LIMIT'),
-        'since': '100 years ago',
-        'until': 'now',
-        'viz_type': 'table',
-        'where': '',
-        'markup_type': 'markdown',
+        "compare_lag": "10",
+        "compare_suffix": "o10Y",
+        "limit": "25",
+        "granularity_sqla": "ds",
+        "groupby": [],
+        "metric": "sum__num",
+        "metrics": ["sum__num"],
+        "row_limit": config.get("ROW_LIMIT"),
+        "since": "100 years ago",
+        "until": "now",
+        "viz_type": "table",
+        "where": "",
+        "markup_type": "markdown",
     }
 
-    print('Creating some slices')
+    print("Creating some slices")
     slices = [
         Slice(
-            slice_name='Girls',
-            viz_type='table',
-            datasource_type='table',
+            slice_name="Girls",
+            viz_type="table",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                groupby=['name'],
-                filters=[{
-                    'col': 'gender',
-                    'op': 'in',
-                    'val': ['girl'],
-                }],
+                groupby=["name"],
+                filters=[{"col": "gender", "op": "in", "val": ["girl"]}],
                 row_limit=50,
-                timeseries_limit_metric='sum__num')),
+                timeseries_limit_metric="sum__num",
+            ),
+        ),
         Slice(
-            slice_name='Boys',
-            viz_type='table',
-            datasource_type='table',
+            slice_name="Boys",
+            viz_type="table",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                groupby=['name'],
-                filters=[{
-                    'col': 'gender',
-                    'op': 'in',
-                    'val': ['boy'],
-                }],
-                row_limit=50)),
+                groupby=["name"],
+                filters=[{"col": "gender", "op": "in", "val": ["boy"]}],
+                row_limit=50,
+            ),
+        ),
         Slice(
-            slice_name='Participants',
-            viz_type='big_number',
-            datasource_type='table',
+            slice_name="Participants",
+            viz_type="big_number",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='big_number', granularity_sqla='ds',
-                compare_lag='5', compare_suffix='over 5Y')),
+                viz_type="big_number",
+                granularity_sqla="ds",
+                compare_lag="5",
+                compare_suffix="over 5Y",
+            ),
+        ),
         Slice(
-            slice_name='Genders',
-            viz_type='pie',
-            datasource_type='table',
+            slice_name="Genders",
+            viz_type="pie",
+            datasource_type="table",
             datasource_id=tbl.id,
-            params=get_slice_json(
-                defaults,
-                viz_type='pie', groupby=['gender'])),
+            params=get_slice_json(defaults, viz_type="pie", groupby=["gender"]),
+        ),
         Slice(
-            slice_name='Genders by State',
-            viz_type='dist_bar',
-            datasource_type='table',
+            slice_name="Genders by State",
+            viz_type="dist_bar",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
                 adhoc_filters=[
                     {
-                        'clause': 'WHERE',
-                        'expressionType': 'SIMPLE',
-                        'filterOptionName': '2745eae5',
-                        'comparator': ['other'],
-                        'operator': 'not in',
-                        'subject': 'state',
-                    },
+                        "clause": "WHERE",
+                        "expressionType": "SIMPLE",
+                        "filterOptionName": "2745eae5",
+                        "comparator": ["other"],
+                        "operator": "not in",
+                        "subject": "state",
+                    }
                 ],
-                viz_type='dist_bar',
+                viz_type="dist_bar",
                 metrics=[
                     {
-                        'expressionType': 'SIMPLE',
-                        'column': {
-                            'column_name': 'sum_boys',
-                            'type': 'BIGINT(20)',
-                        },
-                        'aggregate': 'SUM',
-                        'label': 'Boys',
-                        'optionName': 'metric_11',
+                        "expressionType": "SIMPLE",
+                        "column": {"column_name": "sum_boys", "type": "BIGINT(20)"},
+                        "aggregate": "SUM",
+                        "label": "Boys",
+                        "optionName": "metric_11",
                     },
                     {
-                        'expressionType': 'SIMPLE',
-                        'column': {
-                            'column_name': 'sum_girls',
-                            'type': 'BIGINT(20)',
-                        },
-                        'aggregate': 'SUM',
-                        'label': 'Girls',
-                        'optionName': 'metric_12',
+                        "expressionType": "SIMPLE",
+                        "column": {"column_name": "sum_girls", "type": "BIGINT(20)"},
+                        "aggregate": "SUM",
+                        "label": "Girls",
+                        "optionName": "metric_12",
                     },
                 ],
-                groupby=['state'])),
+                groupby=["state"],
+            ),
+        ),
         Slice(
-            slice_name='Trends',
-            viz_type='line',
-            datasource_type='table',
+            slice_name="Trends",
+            viz_type="line",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='line', groupby=['name'],
-                granularity_sqla='ds', rich_tooltip=True, show_legend=True)),
+                viz_type="line",
+                groupby=["name"],
+                granularity_sqla="ds",
+                rich_tooltip=True,
+                show_legend=True,
+            ),
+        ),
         Slice(
-            slice_name='Average and Sum Trends',
-            viz_type='dual_line',
-            datasource_type='table',
+            slice_name="Average and Sum Trends",
+            viz_type="dual_line",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='dual_line',
+                viz_type="dual_line",
                 metric={
-                    'expressionType': 'SIMPLE',
-                    'column': {
-                        'column_name': 'num',
-                        'type': 'BIGINT(20)',
-                    },
-                    'aggregate': 'AVG',
-                    'label': 'AVG(num)',
-                    'optionName': 'metric_vgops097wej_g8uff99zhk7',
+                    "expressionType": "SIMPLE",
+                    "column": {"column_name": "num", "type": "BIGINT(20)"},
+                    "aggregate": "AVG",
+                    "label": "AVG(num)",
+                    "optionName": "metric_vgops097wej_g8uff99zhk7",
                 },
-                metric_2='sum__num',
-                granularity_sqla='ds')),
+                metric_2="sum__num",
+                granularity_sqla="ds",
+            ),
+        ),
         Slice(
-            slice_name='Title',
-            viz_type='markup',
-            datasource_type='table',
+            slice_name="Title",
+            viz_type="markup",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='markup', markup_type='html',
+                viz_type="markup",
+                markup_type="html",
                 code="""\
     <div style='text-align:center'>
         <h1>Birth Names Dashboard</h1>
@@ -211,123 +214,141 @@ def load_birth_names():
         </p>
         <img src='/static/assets/images/babytux.jpg'>
     </div>
-    """)),
+    """,
+            ),
+        ),
         Slice(
-            slice_name='Name Cloud',
-            viz_type='word_cloud',
-            datasource_type='table',
+            slice_name="Name Cloud",
+            viz_type="word_cloud",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='word_cloud', size_from='10',
-                series='name', size_to='70', rotation='square',
-                limit='100')),
+                viz_type="word_cloud",
+                size_from="10",
+                series="name",
+                size_to="70",
+                rotation="square",
+                limit="100",
+            ),
+        ),
         Slice(
-            slice_name='Pivot Table',
-            viz_type='pivot_table',
-            datasource_type='table',
+            slice_name="Pivot Table",
+            viz_type="pivot_table",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='pivot_table', metrics=['sum__num'],
-                groupby=['name'], columns=['state'])),
+                viz_type="pivot_table",
+                metrics=["sum__num"],
+                groupby=["name"],
+                columns=["state"],
+            ),
+        ),
         Slice(
-            slice_name='Number of Girls',
-            viz_type='big_number_total',
-            datasource_type='table',
+            slice_name="Number of Girls",
+            viz_type="big_number_total",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                viz_type='big_number_total', granularity_sqla='ds',
-                filters=[{
-                    'col': 'gender',
-                    'op': 'in',
-                    'val': ['girl'],
-                }],
-                subheader='total female participants')),
+                viz_type="big_number_total",
+                granularity_sqla="ds",
+                filters=[{"col": "gender", "op": "in", "val": ["girl"]}],
+                subheader="total female participants",
+            ),
+        ),
         Slice(
-            slice_name='Number of California Births',
-            viz_type='big_number_total',
-            datasource_type='table',
+            slice_name="Number of California Births",
+            viz_type="big_number_total",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
                 metric={
-                    'expressionType': 'SIMPLE',
-                    'column': {
-                        'column_name': 'num_california',
-                        'expression': "CASE WHEN state = 'CA' THEN num ELSE 0 END",
+                    "expressionType": "SIMPLE",
+                    "column": {
+                        "column_name": "num_california",
+                        "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
                     },
-                    'aggregate': 'SUM',
-                    'label': 'SUM(num_california)',
+                    "aggregate": "SUM",
+                    "label": "SUM(num_california)",
                 },
-                viz_type='big_number_total',
-                granularity_sqla='ds')),
+                viz_type="big_number_total",
+                granularity_sqla="ds",
+            ),
+        ),
         Slice(
-            slice_name='Top 10 California Names Timeseries',
-            viz_type='line',
-            datasource_type='table',
+            slice_name="Top 10 California Names Timeseries",
+            viz_type="line",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                metrics=[{
-                    'expressionType': 'SIMPLE',
-                    'column': {
-                        'column_name': 'num_california',
-                        'expression': "CASE WHEN state = 'CA' THEN num ELSE 0 END",
-                    },
-                    'aggregate': 'SUM',
-                    'label': 'SUM(num_california)',
-                }],
-                viz_type='line',
-                granularity_sqla='ds',
-                groupby=['name'],
+                metrics=[
+                    {
+                        "expressionType": "SIMPLE",
+                        "column": {
+                            "column_name": "num_california",
+                            "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
+                        },
+                        "aggregate": "SUM",
+                        "label": "SUM(num_california)",
+                    }
+                ],
+                viz_type="line",
+                granularity_sqla="ds",
+                groupby=["name"],
                 timeseries_limit_metric={
-                    'expressionType': 'SIMPLE',
-                    'column': {
-                        'column_name': 'num_california',
-                        'expression': "CASE WHEN state = 'CA' THEN num ELSE 0 END",
+                    "expressionType": "SIMPLE",
+                    "column": {
+                        "column_name": "num_california",
+                        "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
                     },
-                    'aggregate': 'SUM',
-                    'label': 'SUM(num_california)',
+                    "aggregate": "SUM",
+                    "label": "SUM(num_california)",
                 },
-                limit='10')),
+                limit="10",
+            ),
+        ),
         Slice(
-            slice_name='Names Sorted by Num in California',
-            viz_type='table',
-            datasource_type='table',
+            slice_name="Names Sorted by Num in California",
+            viz_type="table",
+            datasource_type="table",
             datasource_id=tbl.id,
             params=get_slice_json(
                 defaults,
-                groupby=['name'],
+                groupby=["name"],
                 row_limit=50,
                 timeseries_limit_metric={
-                    'expressionType': 'SIMPLE',
-                    'column': {
-                        'column_name': 'num_california',
-                        'expression': "CASE WHEN state = 'CA' THEN num ELSE 0 END",
+                    "expressionType": "SIMPLE",
+                    "column": {
+                        "column_name": "num_california",
+                        "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
                     },
-                    'aggregate': 'SUM',
-                    'label': 'SUM(num_california)',
-                })),
+                    "aggregate": "SUM",
+                    "label": "SUM(num_california)",
+                },
+            ),
+        ),
         Slice(
-            slice_name='Num Births Trend',
-            viz_type='line',
-            datasource_type='table',
+            slice_name="Num Births Trend",
+            viz_type="line",
+            datasource_type="table",
             datasource_id=tbl.id,
-            params=get_slice_json(
-                defaults,
-                viz_type='line')),
+            params=get_slice_json(defaults, viz_type="line"),
+        ),
     ]
     for slc in slices:
         merge_slice(slc)
 
-    print('Creating a dashboard')
-    dash = db.session.query(Dash).filter_by(dashboard_title='Births').first()
+    print("Creating a dashboard")
+    dash = db.session.query(Dash).filter_by(dashboard_title="Births").first()
 
     if not dash:
         dash = Dash()
-    js = textwrap.dedent("""\
+    js = textwrap.dedent(
+        """\
 {
     "CHART-0dd270f0": {
         "meta": {
@@ -576,13 +597,14 @@ def load_birth_names():
     },
     "DASHBOARD_VERSION_KEY": "v2"
 }
-        """)
+        """
+    )
     pos = json.loads(js)
     # dashboard v2 doesn't allow add markup slice
-    dash.slices = [slc for slc in slices if slc.viz_type != 'markup']
+    dash.slices = [slc for slc in slices if slc.viz_type != "markup"]
     update_slice_ids(pos, dash.slices)
-    dash.dashboard_title = 'Births'
+    dash.dashboard_title = "Births"
     dash.position_json = json.dumps(pos, indent=4)
-    dash.slug = 'births'
+    dash.slug = "births"
     db.session.merge(dash)
     db.session.commit()
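
Each `Slice` above stores its `params` as JSON built from the shared `defaults` dict plus per-slice overrides. A rough sketch of what `get_slice_json` (imported from `.helpers`) presumably does, under that assumption:

    import json

    def get_slice_json(defaults, **overrides):
        # per-slice keyword arguments win over the shared defaults
        params = dict(defaults)
        params.update(overrides)
        return json.dumps(params, indent=4, sort_keys=True)

    get_slice_json({"viz_type": "table"}, viz_type="line", groupby=["name"])
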
diff --git a/superset/data/countries.py b/superset/data/countries.py
index c0dd8d6..0ae62dc 100644
--- a/superset/data/countries.py
+++ b/superset/data/countries.py
@@ -8,7 +8,7 @@ countries = [
         "capital": "Luanda",
         "lat": -12.5,
         "lng": 18.5,
-        "cca3": "AGO"
+        "cca3": "AGO",
     },
     {
         "name": "Algeria",
@@ -18,7 +18,7 @@ countries = [
         "capital": "Algiers",
         "lat": 28,
         "lng": 3,
-        "cca3": "DZA"
+        "cca3": "DZA",
     },
     {
         "name": "Egypt",
@@ -28,7 +28,7 @@ countries = [
         "capital": "Cairo",
         "lat": 27,
         "lng": 30,
-        "cca3": "EGY"
+        "cca3": "EGY",
     },
     {
         "name": "Bangladesh",
@@ -38,7 +38,7 @@ countries = [
         "capital": "Dhaka",
         "lat": 24,
         "lng": 90,
-        "cca3": "BGD"
+        "cca3": "BGD",
     },
     {
         "name": "Niger",
@@ -48,7 +48,7 @@ countries = [
         "capital": "Niamey",
         "lat": 16,
         "lng": 8,
-        "cca3": "NER"
+        "cca3": "NER",
     },
     {
         "name": "Liechtenstein",
@@ -58,7 +58,7 @@ countries = [
         "capital": "Vaduz",
         "lat": 47.26666666,
         "lng": 9.53333333,
-        "cca3": "LIE"
+        "cca3": "LIE",
     },
     {
         "name": "Namibia",
@@ -68,7 +68,7 @@ countries = [
         "capital": "Windhoek",
         "lat": -22,
         "lng": 17,
-        "cca3": "NAM"
+        "cca3": "NAM",
     },
     {
         "name": "Bulgaria",
@@ -78,7 +78,7 @@ countries = [
         "capital": "Sofia",
         "lat": 43,
         "lng": 25,
-        "cca3": "BGR"
+        "cca3": "BGR",
     },
     {
         "name": "Bolivia",
@@ -88,7 +88,7 @@ countries = [
         "capital": "Sucre",
         "lat": -17,
         "lng": -65,
-        "cca3": "BOL"
+        "cca3": "BOL",
     },
     {
         "name": "Ghana",
@@ -98,7 +98,7 @@ countries = [
         "capital": "Accra",
         "lat": 8,
         "lng": -2,
-        "cca3": "GHA"
+        "cca3": "GHA",
     },
     {
         "name": "Cocos (Keeling) Islands",
@@ -108,7 +108,7 @@ countries = [
         "capital": "West Island",
         "lat": -12.5,
         "lng": 96.83333333,
-        "cca3": "CCK"
+        "cca3": "CCK",
     },
     {
         "name": "Pakistan",
@@ -118,7 +118,7 @@ countries = [
         "capital": "Islamabad",
         "lat": 30,
         "lng": 70,
-        "cca3": "PAK"
+        "cca3": "PAK",
     },
     {
         "name": "Cape Verde",
@@ -128,7 +128,7 @@ countries = [
         "capital": "Praia",
         "lat": 16,
         "lng": -24,
-        "cca3": "CPV"
+        "cca3": "CPV",
     },
     {
         "name": "Jordan",
@@ -138,7 +138,7 @@ countries = [
         "capital": "Amman",
         "lat": 31,
         "lng": 36,
-        "cca3": "JOR"
+        "cca3": "JOR",
     },
     {
         "name": "Liberia",
@@ -148,7 +148,7 @@ countries = [
         "capital": "Monrovia",
         "lat": 6.5,
         "lng": -9.5,
-        "cca3": "LBR"
+        "cca3": "LBR",
     },
     {
         "name": "Libya",
@@ -158,7 +158,7 @@ countries = [
         "capital": "Tripoli",
         "lat": 25,
         "lng": 17,
-        "cca3": "LBY"
+        "cca3": "LBY",
     },
     {
         "name": "Malaysia",
@@ -168,7 +168,7 @@ countries = [
         "capital": "Kuala Lumpur",
         "lat": 2.5,
         "lng": 112.5,
-        "cca3": "MYS"
+        "cca3": "MYS",
     },
     {
         "name": "Dominican Republic",
@@ -178,7 +178,7 @@ countries = [
         "capital": "Santo Domingo",
         "lat": 19,
         "lng": -70.66666666,
-        "cca3": "DOM"
+        "cca3": "DOM",
     },
     {
         "name": "Puerto Rico",
@@ -188,7 +188,7 @@ countries = [
         "capital": "San Juan",
         "lat": 18.25,
         "lng": -66.5,
-        "cca3": "PRI"
+        "cca3": "PRI",
     },
     {
         "name": "Mayotte",
@@ -198,7 +198,7 @@ countries = [
         "capital": "Mamoudzou",
         "lat": -12.83333333,
         "lng": 45.16666666,
-        "cca3": "MYT"
+        "cca3": "MYT",
     },
     {
         "name": "North Korea",
@@ -208,7 +208,7 @@ countries = [
         "capital": "Pyongyang",
         "lat": 40,
         "lng": 127,
-        "cca3": "PRK"
+        "cca3": "PRK",
     },
     {
         "name": "Palestine",
@@ -218,7 +218,7 @@ countries = [
         "capital": "Ramallah",
         "lat": 31.9,
         "lng": 35.2,
-        "cca3": "PSE"
+        "cca3": "PSE",
     },
     {
         "name": "Tanzania",
@@ -228,7 +228,7 @@ countries = [
         "capital": "Dodoma",
         "lat": -6,
         "lng": 35,
-        "cca3": "TZA"
+        "cca3": "TZA",
     },
     {
         "name": "Botswana",
@@ -238,7 +238,7 @@ countries = [
         "capital": "Gaborone",
         "lat": -22,
         "lng": 24,
-        "cca3": "BWA"
+        "cca3": "BWA",
     },
     {
         "name": "Cambodia",
@@ -248,7 +248,7 @@ countries = [
         "capital": "Phnom Penh",
         "lat": 13,
         "lng": 105,
-        "cca3": "KHM"
+        "cca3": "KHM",
     },
     {
         "name": "Nicaragua",
@@ -258,7 +258,7 @@ countries = [
         "capital": "Managua",
         "lat": 13,
         "lng": -85,
-        "cca3": "NIC"
+        "cca3": "NIC",
     },
     {
         "name": "Trinidad and Tobago",
@@ -268,7 +268,7 @@ countries = [
         "capital": "Port of Spain",
         "lat": 11,
         "lng": -61,
-        "cca3": "TTO"
+        "cca3": "TTO",
     },
     {
         "name": "Ethiopia",
@@ -278,7 +278,7 @@ countries = [
         "capital": "Addis Ababa",
         "lat": 8,
         "lng": 38,
-        "cca3": "ETH"
+        "cca3": "ETH",
     },
     {
         "name": "Paraguay",
@@ -288,7 +288,7 @@ countries = [
         "capital": "Asuncion",
         "lat": -23,
         "lng": -58,
-        "cca3": "PRY"
+        "cca3": "PRY",
     },
     {
         "name": "Hong Kong",
@@ -298,7 +298,7 @@ countries = [
         "capital": "City of Victoria",
         "lat": 22.267,
         "lng": 114.188,
-        "cca3": "HKG"
+        "cca3": "HKG",
     },
     {
         "name": "Saudi Arabia",
@@ -308,7 +308,7 @@ countries = [
         "capital": "Riyadh",
         "lat": 25,
         "lng": 45,
-        "cca3": "SAU"
+        "cca3": "SAU",
     },
     {
         "name": "Lebanon",
@@ -318,7 +318,7 @@ countries = [
         "capital": "Beirut",
         "lat": 33.83333333,
         "lng": 35.83333333,
-        "cca3": "LBN"
+        "cca3": "LBN",
     },
     {
         "name": "Slovenia",
@@ -328,7 +328,7 @@ countries = [
         "capital": "Ljubljana",
         "lat": 46.11666666,
         "lng": 14.81666666,
-        "cca3": "SVN"
+        "cca3": "SVN",
     },
     {
         "name": "Burkina Faso",
@@ -338,7 +338,7 @@ countries = [
         "capital": "Ouagadougou",
         "lat": 13,
         "lng": -2,
-        "cca3": "BFA"
+        "cca3": "BFA",
     },
     {
         "name": "Switzerland",
@@ -348,7 +348,7 @@ countries = [
         "capital": "Bern",
         "lat": 47,
         "lng": 8,
-        "cca3": "CHE"
+        "cca3": "CHE",
     },
     {
         "name": "Mauritania",
@@ -358,7 +358,7 @@ countries = [
         "capital": "Nouakchott",
         "lat": 20,
         "lng": -12,
-        "cca3": "MRT"
+        "cca3": "MRT",
     },
     {
         "name": "Croatia",
@@ -368,7 +368,7 @@ countries = [
         "capital": "Zagreb",
         "lat": 45.16666666,
         "lng": 15.5,
-        "cca3": "HRV"
+        "cca3": "HRV",
     },
     {
         "name": "Chile",
@@ -378,7 +378,7 @@ countries = [
         "capital": "Santiago",
         "lat": -30,
         "lng": -71,
-        "cca3": "CHL"
+        "cca3": "CHL",
     },
     {
         "name": "China",
@@ -388,7 +388,7 @@ countries = [
         "capital": "Beijing",
         "lat": 35,
         "lng": 105,
-        "cca3": "CHN"
+        "cca3": "CHN",
     },
     {
         "name": "Saint Kitts and Nevis",
@@ -398,7 +398,7 @@ countries = [
         "capital": "Basseterre",
         "lat": 17.33333333,
         "lng": -62.75,
-        "cca3": "KNA"
+        "cca3": "KNA",
     },
     {
         "name": "Sierra Leone",
@@ -408,7 +408,7 @@ countries = [
         "capital": "Freetown",
         "lat": 8.5,
         "lng": -11.5,
-        "cca3": "SLE"
+        "cca3": "SLE",
     },
     {
         "name": "Jamaica",
@@ -418,7 +418,7 @@ countries = [
         "capital": "Kingston",
         "lat": 18.25,
         "lng": -77.5,
-        "cca3": "JAM"
+        "cca3": "JAM",
     },
     {
         "name": "San Marino",
@@ -428,7 +428,7 @@ countries = [
         "capital": "City of San Marino",
         "lat": 43.76666666,
         "lng": 12.41666666,
-        "cca3": "SMR"
+        "cca3": "SMR",
     },
     {
         "name": "Gibraltar",
@@ -438,7 +438,7 @@ countries = [
         "capital": "Gibraltar",
         "lat": 36.13333333,
         "lng": -5.35,
-        "cca3": "GIB"
+        "cca3": "GIB",
     },
     {
         "name": "Djibouti",
@@ -448,7 +448,7 @@ countries = [
         "capital": "Djibouti",
         "lat": 11.5,
         "lng": 43,
-        "cca3": "DJI"
+        "cca3": "DJI",
     },
     {
         "name": "Guinea",
@@ -458,7 +458,7 @@ countries = [
         "capital": "Conakry",
         "lat": 11,
         "lng": -10,
-        "cca3": "GIN"
+        "cca3": "GIN",
     },
     {
         "name": "Finland",
@@ -468,7 +468,7 @@ countries = [
         "capital": "Helsinki",
         "lat": 64,
         "lng": 26,
-        "cca3": "FIN"
+        "cca3": "FIN",
     },
     {
         "name": "Uruguay",
@@ -478,7 +478,7 @@ countries = [
         "capital": "Montevideo",
         "lat": -33,
         "lng": -56,
-        "cca3": "URY"
+        "cca3": "URY",
     },
     {
         "name": "Thailand",
@@ -488,7 +488,7 @@ countries = [
         "capital": "Bangkok",
         "lat": 15,
         "lng": 100,
-        "cca3": "THA"
+        "cca3": "THA",
     },
     {
         "name": "Sao Tome and Principe",
@@ -498,7 +498,7 @@ countries = [
         "capital": "Sao Tome",
         "lat": 1,
         "lng": 7,
-        "cca3": "STP"
+        "cca3": "STP",
     },
     {
         "name": "Seychelles",
@@ -508,7 +508,7 @@ countries = [
         "capital": "Victoria",
         "lat": -4.58333333,
         "lng": 55.66666666,
-        "cca3": "SYC"
+        "cca3": "SYC",
     },
     {
         "name": "Nepal",
@@ -518,7 +518,7 @@ countries = [
         "capital": "Kathmandu",
         "lat": 28,
         "lng": 84,
-        "cca3": "NPL"
+        "cca3": "NPL",
     },
     {
         "name": "Christmas Island",
@@ -528,7 +528,7 @@ countries = [
         "capital": "Flying Fish Cove",
         "lat": -10.5,
         "lng": 105.66666666,
-        "cca3": "CXR"
+        "cca3": "CXR",
     },
     {
         "name": "Laos",
@@ -538,7 +538,7 @@ countries = [
         "capital": "Vientiane",
         "lat": 18,
         "lng": 105,
-        "cca3": "LAO"
+        "cca3": "LAO",
     },
     {
         "name": "Yemen",
@@ -548,7 +548,7 @@ countries = [
         "capital": "Sana'a",
         "lat": 15,
         "lng": 48,
-        "cca3": "YEM"
+        "cca3": "YEM",
     },
     {
         "name": "Bouvet Island",
@@ -558,7 +558,7 @@ countries = [
         "capital": "",
         "lat": -54.43333333,
         "lng": 3.4,
-        "cca3": "BVT"
+        "cca3": "BVT",
     },
     {
         "name": "South Africa",
@@ -568,7 +568,7 @@ countries = [
         "capital": "Pretoria",
         "lat": -29,
         "lng": 24,
-        "cca3": "ZAF"
+        "cca3": "ZAF",
     },
     {
         "name": "Kiribati",
@@ -578,7 +578,7 @@ countries = [
         "capital": "South Tarawa",
         "lat": 1.41666666,
         "lng": 173,
-        "cca3": "KIR"
+        "cca3": "KIR",
     },
     {
         "name": "Philippines",
@@ -588,7 +588,7 @@ countries = [
         "capital": "Manila",
         "lat": 13,
         "lng": 122,
-        "cca3": "PHL"
+        "cca3": "PHL",
     },
     {
         "name": "Sint Maarten",
@@ -598,7 +598,7 @@ countries = [
         "capital": "Philipsburg",
         "lat": 18.033333,
         "lng": -63.05,
-        "cca3": "SXM"
+        "cca3": "SXM",
     },
     {
         "name": "Romania",
@@ -608,7 +608,7 @@ countries = [
         "capital": "Bucharest",
         "lat": 46,
         "lng": 25,
-        "cca3": "ROU"
+        "cca3": "ROU",
     },
     {
         "name": "United States Virgin Islands",
@@ -618,7 +618,7 @@ countries = [
         "capital": "Charlotte Amalie",
         "lat": 18.35,
         "lng": -64.933333,
-        "cca3": "VIR"
+        "cca3": "VIR",
     },
     {
         "name": "Syria",
@@ -628,7 +628,7 @@ countries = [
         "capital": "Damascus",
         "lat": 35,
         "lng": 38,
-        "cca3": "SYR"
+        "cca3": "SYR",
     },
     {
         "name": "Macau",
@@ -638,7 +638,7 @@ countries = [
         "capital": "",
         "lat": 22.16666666,
         "lng": 113.55,
-        "cca3": "MAC"
+        "cca3": "MAC",
     },
     {
         "name": "Saint Martin",
@@ -648,7 +648,7 @@ countries = [
         "capital": "Marigot",
         "lat": 18.08333333,
         "lng": -63.95,
-        "cca3": "MAF"
+        "cca3": "MAF",
     },
     {
         "name": "Malta",
@@ -658,7 +658,7 @@ countries = [
         "capital": "Valletta",
         "lat": 35.83333333,
         "lng": 14.58333333,
-        "cca3": "MLT"
+        "cca3": "MLT",
     },
     {
         "name": "Kazakhstan",
@@ -668,7 +668,7 @@ countries = [
         "capital": "Astana",
         "lat": 48,
         "lng": 68,
-        "cca3": "KAZ"
+        "cca3": "KAZ",
     },
     {
         "name": "Turks and Caicos Islands",
@@ -678,7 +678,7 @@ countries = [
         "capital": "Cockburn Town",
         "lat": 21.75,
         "lng": -71.58333333,
-        "cca3": "TCA"
+        "cca3": "TCA",
     },
     {
         "name": "French Polynesia",
@@ -688,7 +688,7 @@ countries = [
         "capital": "Papeete",
         "lat": -15,
         "lng": -140,
-        "cca3": "PYF"
+        "cca3": "PYF",
     },
     {
         "name": "Niue",
@@ -698,7 +698,7 @@ countries = [
         "capital": "Alofi",
         "lat": -19.03333333,
         "lng": -169.86666666,
-        "cca3": "NIU"
+        "cca3": "NIU",
     },
     {
         "name": "Dominica",
@@ -708,7 +708,7 @@ countries = [
         "capital": "Roseau",
         "lat": 15.41666666,
         "lng": -61.33333333,
-        "cca3": "DMA"
+        "cca3": "DMA",
     },
     {
         "name": "Benin",
@@ -718,7 +718,7 @@ countries = [
         "capital": "Porto-Novo",
         "lat": 9.5,
         "lng": 2.25,
-        "cca3": "BEN"
+        "cca3": "BEN",
     },
     {
         "name": "French Guiana",
@@ -728,7 +728,7 @@ countries = [
         "capital": "Cayenne",
         "lat": 4,
         "lng": -53,
-        "cca3": "GUF"
+        "cca3": "GUF",
     },
     {
         "name": "Belgium",
@@ -738,7 +738,7 @@ countries = [
         "capital": "Brussels",
         "lat": 50.83333333,
         "lng": 4,
-        "cca3": "BEL"
+        "cca3": "BEL",
     },
     {
         "name": "Montserrat",
@@ -748,7 +748,7 @@ countries = [
         "capital": "Plymouth",
         "lat": 16.75,
         "lng": -62.2,
-        "cca3": "MSR"
+        "cca3": "MSR",
     },
     {
         "name": "Togo",
@@ -758,7 +758,7 @@ countries = [
         "capital": "Lome",
         "lat": 8,
         "lng": 1.16666666,
-        "cca3": "TGO"
+        "cca3": "TGO",
     },
     {
         "name": "Germany",
@@ -768,7 +768,7 @@ countries = [
         "capital": "Berlin",
         "lat": 51,
         "lng": 9,
-        "cca3": "DEU"
+        "cca3": "DEU",
     },
     {
         "name": "Guam",
@@ -778,7 +778,7 @@ countries = [
         "capital": "Hagatna",
         "lat": 13.46666666,
         "lng": 144.78333333,
-        "cca3": "GUM"
+        "cca3": "GUM",
     },
     {
         "name": "Sri Lanka",
@@ -788,7 +788,7 @@ countries = [
         "capital": "Colombo",
         "lat": 7,
         "lng": 81,
-        "cca3": "LKA"
+        "cca3": "LKA",
     },
     {
         "name": "South Sudan",
@@ -798,7 +798,7 @@ countries = [
         "capital": "Juba",
         "lat": 7,
         "lng": 30,
-        "cca3": "SSD"
+        "cca3": "SSD",
     },
     {
         "name": "Falkland Islands",
@@ -808,7 +808,7 @@ countries = [
         "capital": "Stanley",
         "lat": -51.75,
         "lng": -59,
-        "cca3": "FLK"
+        "cca3": "FLK",
     },
     {
         "name": "United Kingdom",
@@ -818,7 +818,7 @@ countries = [
         "capital": "London",
         "lat": 54,
         "lng": -2,
-        "cca3": "GBR"
+        "cca3": "GBR",
     },
     {
         "name": "Guyana",
@@ -828,7 +828,7 @@ countries = [
         "capital": "Georgetown",
         "lat": 5,
         "lng": -59,
-        "cca3": "GUY"
+        "cca3": "GUY",
     },
     {
         "name": "Costa Rica",
@@ -838,7 +838,7 @@ countries = [
         "capital": "San Jose",
         "lat": 10,
         "lng": -84,
-        "cca3": "CRI"
+        "cca3": "CRI",
     },
     {
         "name": "Cameroon",
@@ -848,7 +848,7 @@ countries = [
         "capital": "Yaounde",
         "lat": 6,
         "lng": 12,
-        "cca3": "CMR"
+        "cca3": "CMR",
     },
     {
         "name": "Morocco",
@@ -858,7 +858,7 @@ countries = [
         "capital": "Rabat",
         "lat": 32,
         "lng": -5,
-        "cca3": "MAR"
+        "cca3": "MAR",
     },
     {
         "name": "Northern Mariana Islands",
@@ -868,7 +868,7 @@ countries = [
         "capital": "Saipan",
         "lat": 15.2,
         "lng": 145.75,
-        "cca3": "MNP"
+        "cca3": "MNP",
     },
     {
         "name": "Lesotho",
@@ -878,7 +878,7 @@ countries = [
         "capital": "Maseru",
         "lat": -29.5,
         "lng": 28.5,
-        "cca3": "LSO"
+        "cca3": "LSO",
     },
     {
         "name": "Hungary",
@@ -888,7 +888,7 @@ countries = [
         "capital": "Budapest",
         "lat": 47,
         "lng": 20,
-        "cca3": "HUN"
+        "cca3": "HUN",
     },
     {
         "name": "Turkmenistan",
@@ -898,7 +898,7 @@ countries = [
         "capital": "Ashgabat",
         "lat": 40,
         "lng": 60,
-        "cca3": "TKM"
+        "cca3": "TKM",
     },
     {
         "name": "Suriname",
@@ -908,7 +908,7 @@ countries = [
         "capital": "Paramaribo",
         "lat": 4,
         "lng": -56,
-        "cca3": "SUR"
+        "cca3": "SUR",
     },
     {
         "name": "Netherlands",
@@ -918,7 +918,7 @@ countries = [
         "capital": "Amsterdam",
         "lat": 52.5,
         "lng": 5.75,
-        "cca3": "NLD"
+        "cca3": "NLD",
     },
     {
         "name": "Bermuda",
@@ -928,7 +928,7 @@ countries = [
         "capital": "Hamilton",
         "lat": 32.33333333,
         "lng": -64.75,
-        "cca3": "BMU"
+        "cca3": "BMU",
     },
     {
         "name": "Heard Island and McDonald Islands",
@@ -938,7 +938,7 @@ countries = [
         "capital": "",
         "lat": -53.1,
         "lng": 72.51666666,
-        "cca3": "HMD"
+        "cca3": "HMD",
     },
     {
         "name": "Chad",
@@ -948,7 +948,7 @@ countries = [
         "capital": "N'Djamena",
         "lat": 15,
         "lng": 19,
-        "cca3": "TCD"
+        "cca3": "TCD",
     },
     {
         "name": "Georgia",
@@ -958,7 +958,7 @@ countries = [
         "capital": "Tbilisi",
         "lat": 42,
         "lng": 43.5,
-        "cca3": "GEO"
+        "cca3": "GEO",
     },
     {
         "name": "Montenegro",
@@ -968,7 +968,7 @@ countries = [
         "capital": "Podgorica",
         "lat": 42.5,
         "lng": 19.3,
-        "cca3": "MNE"
+        "cca3": "MNE",
     },
     {
         "name": "Mongolia",
@@ -978,7 +978,7 @@ countries = [
         "capital": "Ulan Bator",
         "lat": 46,
         "lng": 105,
-        "cca3": "MNG"
+        "cca3": "MNG",
     },
     {
         "name": "Marshall Islands",
@@ -988,7 +988,7 @@ countries = [
         "capital": "Majuro",
         "lat": 9,
         "lng": 168,
-        "cca3": "MHL"
+        "cca3": "MHL",
     },
     {
         "name": "Martinique",
@@ -998,7 +998,7 @@ countries = [
         "capital": "Fort-de-France",
         "lat": 14.666667,
         "lng": -61,
-        "cca3": "MTQ"
+        "cca3": "MTQ",
     },
     {
         "name": "Belize",
@@ -1008,7 +1008,7 @@ countries = [
         "capital": "Belmopan",
         "lat": 17.25,
         "lng": -88.75,
-        "cca3": "BLZ"
+        "cca3": "BLZ",
     },
     {
         "name": "Norfolk Island",
@@ -1018,7 +1018,7 @@ countries = [
         "capital": "Kingston",
         "lat": -29.03333333,
         "lng": 167.95,
-        "cca3": "NFK"
+        "cca3": "NFK",
     },
     {
         "name": "Myanmar",
@@ -1028,7 +1028,7 @@ countries = [
         "capital": "Naypyidaw",
         "lat": 22,
         "lng": 98,
-        "cca3": "MMR"
+        "cca3": "MMR",
     },
     {
         "name": "Afghanistan",
@@ -1038,7 +1038,7 @@ countries = [
         "capital": "Kabul",
         "lat": 33,
         "lng": 65,
-        "cca3": "AFG"
+        "cca3": "AFG",
     },
     {
         "name": "Burundi",
@@ -1048,7 +1048,7 @@ countries = [
         "capital": "Bujumbura",
         "lat": -3.5,
         "lng": 30,
-        "cca3": "BDI"
+        "cca3": "BDI",
     },
     {
         "name": "British Virgin Islands",
@@ -1058,7 +1058,7 @@ countries = [
         "capital": "Road Town",
         "lat": 18.431383,
         "lng": -64.62305,
-        "cca3": "VGB"
+        "cca3": "VGB",
     },
     {
         "name": "Belarus",
@@ -1068,7 +1068,7 @@ countries = [
         "capital": "Minsk",
         "lat": 53,
         "lng": 28,
-        "cca3": "BLR"
+        "cca3": "BLR",
     },
     {
         "name": "Saint Barthelemy",
@@ -1078,7 +1078,7 @@ countries = [
         "capital": "Gustavia",
         "lat": 18.5,
         "lng": -63.41666666,
-        "cca3": "BLM"
+        "cca3": "BLM",
     },
     {
         "name": "Grenada",
@@ -1088,7 +1088,7 @@ countries = [
         "capital": "St. George's",
         "lat": 12.11666666,
         "lng": -61.66666666,
-        "cca3": "GRD"
+        "cca3": "GRD",
     },
     {
         "name": "Tokelau",
@@ -1098,7 +1098,7 @@ countries = [
         "capital": "Fakaofo",
         "lat": -9,
         "lng": -172,
-        "cca3": "TKL"
+        "cca3": "TKL",
     },
     {
         "name": "Greece",
@@ -1108,7 +1108,7 @@ countries = [
         "capital": "Athens",
         "lat": 39,
         "lng": 22,
-        "cca3": "GRC"
+        "cca3": "GRC",
     },
     {
         "name": "Russia",
@@ -1118,7 +1118,7 @@ countries = [
         "capital": "Moscow",
         "lat": 60,
         "lng": 100,
-        "cca3": "RUS"
+        "cca3": "RUS",
     },
     {
         "name": "Greenland",
@@ -1128,7 +1128,7 @@ countries = [
         "capital": "Nuuk",
         "lat": 72,
         "lng": -40,
-        "cca3": "GRL"
+        "cca3": "GRL",
     },
     {
         "name": "Andorra",
@@ -1138,7 +1138,7 @@ countries = [
         "capital": "Andorra la Vella",
         "lat": 42.5,
         "lng": 1.5,
-        "cca3": "AND"
+        "cca3": "AND",
     },
     {
         "name": "Mozambique",
@@ -1148,7 +1148,7 @@ countries = [
         "capital": "Maputo",
         "lat": -18.25,
         "lng": 35,
-        "cca3": "MOZ"
+        "cca3": "MOZ",
     },
     {
         "name": "Tajikistan",
@@ -1158,7 +1158,7 @@ countries = [
         "capital": "Dushanbe",
         "lat": 39,
         "lng": 71,
-        "cca3": "TJK"
+        "cca3": "TJK",
     },
     {
         "name": "Haiti",
@@ -1168,7 +1168,7 @@ countries = [
         "capital": "Port-au-Prince",
         "lat": 19,
         "lng": -72.41666666,
-        "cca3": "HTI"
+        "cca3": "HTI",
     },
     {
         "name": "Mexico",
@@ -1178,7 +1178,7 @@ countries = [
         "capital": "Mexico City",
         "lat": 23,
         "lng": -102,
-        "cca3": "MEX"
+        "cca3": "MEX",
     },
     {
         "name": "Zimbabwe",
@@ -1188,7 +1188,7 @@ countries = [
         "capital": "Harare",
         "lat": -20,
         "lng": 30,
-        "cca3": "ZWE"
+        "cca3": "ZWE",
     },
     {
         "name": "Saint Lucia",
@@ -1198,7 +1198,7 @@ countries = [
         "capital": "Castries",
         "lat": 13.88333333,
         "lng": -60.96666666,
-        "cca3": "LCA"
+        "cca3": "LCA",
     },
     {
         "name": "India",
@@ -1208,7 +1208,7 @@ countries = [
         "capital": "New Delhi",
         "lat": 20,
         "lng": 77,
-        "cca3": "IND"
+        "cca3": "IND",
     },
     {
         "name": "Latvia",
@@ -1218,7 +1218,7 @@ countries = [
         "capital": "Riga",
         "lat": 57,
         "lng": 25,
-        "cca3": "LVA"
+        "cca3": "LVA",
     },
     {
         "name": "Bhutan",
@@ -1228,7 +1228,7 @@ countries = [
         "capital": "Thimphu",
         "lat": 27.5,
         "lng": 90.5,
-        "cca3": "BTN"
+        "cca3": "BTN",
     },
     {
         "name": "Saint Vincent and the Grenadines",
@@ -1238,7 +1238,7 @@ countries = [
         "capital": "Kingstown",
         "lat": 13.25,
         "lng": -61.2,
-        "cca3": "VCT"
+        "cca3": "VCT",
     },
     {
         "name": "Vietnam",
@@ -1248,7 +1248,7 @@ countries = [
         "capital": "Hanoi",
         "lat": 16.16666666,
         "lng": 107.83333333,
-        "cca3": "VNM"
+        "cca3": "VNM",
     },
     {
         "name": "Norway",
@@ -1258,7 +1258,7 @@ countries = [
         "capital": "Oslo",
         "lat": 62,
         "lng": 10,
-        "cca3": "NOR"
+        "cca3": "NOR",
     },
     {
         "name": "Czech Republic",
@@ -1268,7 +1268,7 @@ countries = [
         "capital": "Prague",
         "lat": 49.75,
         "lng": 15.5,
-        "cca3": "CZE"
+        "cca3": "CZE",
     },
     {
         "name": "French Southern and Antarctic Lands",
@@ -1278,7 +1278,7 @@ countries = [
         "capital": "Port-aux-Francais",
         "lat": -49.25,
         "lng": 69.167,
-        "cca3": "ATF"
+        "cca3": "ATF",
     },
     {
         "name": "Antigua and Barbuda",
@@ -1288,7 +1288,7 @@ countries = [
         "capital": "Saint John's",
         "lat": 17.05,
         "lng": -61.8,
-        "cca3": "ATG"
+        "cca3": "ATG",
     },
     {
         "name": "Fiji",
@@ -1298,7 +1298,7 @@ countries = [
         "capital": "Suva",
         "lat": -18,
         "lng": 175,
-        "cca3": "FJI"
+        "cca3": "FJI",
     },
     {
         "name": "British Indian Ocean Territory",
@@ -1308,7 +1308,7 @@ countries = [
         "capital": "Diego Garcia",
         "lat": -6,
         "lng": 71.5,
-        "cca3": "IOT"
+        "cca3": "IOT",
     },
     {
         "name": "Honduras",
@@ -1318,7 +1318,7 @@ countries = [
         "capital": "Tegucigalpa",
         "lat": 15,
         "lng": -86.5,
-        "cca3": "HND"
+        "cca3": "HND",
     },
     {
         "name": "Mauritius",
@@ -1328,7 +1328,7 @@ countries = [
         "capital": "Port Louis",
         "lat": -20.28333333,
         "lng": 57.55,
-        "cca3": "MUS"
+        "cca3": "MUS",
     },
     {
         "name": "Antarctica",
@@ -1338,7 +1338,7 @@ countries = [
         "capital": "",
         "lat": -90,
         "lng": 0,
-        "cca3": "ATA"
+        "cca3": "ATA",
     },
     {
         "name": "Luxembourg",
@@ -1348,7 +1348,7 @@ countries = [
         "capital": "Luxembourg",
         "lat": 49.75,
         "lng": 6.16666666,
-        "cca3": "LUX"
+        "cca3": "LUX",
     },
     {
         "name": "Israel",
@@ -1358,7 +1358,7 @@ countries = [
         "capital": "Jerusalem",
         "lat": 31.47,
         "lng": 35.13,
-        "cca3": "ISR"
+        "cca3": "ISR",
     },
     {
         "name": "Micronesia",
@@ -1368,7 +1368,7 @@ countries = [
         "capital": "Palikir",
         "lat": 6.91666666,
         "lng": 158.25,
-        "cca3": "FSM"
+        "cca3": "FSM",
     },
     {
         "name": "Peru",
@@ -1378,7 +1378,7 @@ countries = [
         "capital": "Lima",
         "lat": -10,
         "lng": -76,
-        "cca3": "PER"
+        "cca3": "PER",
     },
     {
         "name": "Reunion",
@@ -1388,7 +1388,7 @@ countries = [
         "capital": "Saint-Denis",
         "lat": -21.15,
         "lng": 55.5,
-        "cca3": "REU"
+        "cca3": "REU",
     },
     {
         "name": "Indonesia",
@@ -1398,7 +1398,7 @@ countries = [
         "capital": "Jakarta",
         "lat": -5,
         "lng": 120,
-        "cca3": "IDN"
+        "cca3": "IDN",
     },
     {
         "name": "Vanuatu",
@@ -1408,7 +1408,7 @@ countries = [
         "capital": "Port Vila",
         "lat": -16,
         "lng": 167,
-        "cca3": "VUT"
+        "cca3": "VUT",
     },
     {
         "name": "Macedonia",
@@ -1418,7 +1418,7 @@ countries = [
         "capital": "Skopje",
         "lat": 41.83333333,
         "lng": 22,
-        "cca3": "MKD"
+        "cca3": "MKD",
     },
     {
         "name": "DR Congo",
@@ -1428,7 +1428,7 @@ countries = [
         "capital": "Kinshasa",
         "lat": 0,
         "lng": 25,
-        "cca3": "COD"
+        "cca3": "COD",
     },
     {
         "name": "Republic of the Congo",
@@ -1438,7 +1438,7 @@ countries = [
         "capital": "Brazzaville",
         "lat": -1,
         "lng": 15,
-        "cca3": "COG"
+        "cca3": "COG",
     },
     {
         "name": "Iceland",
@@ -1448,7 +1448,7 @@ countries = [
         "capital": "Reykjavik",
         "lat": 65,
         "lng": -18,
-        "cca3": "ISL"
+        "cca3": "ISL",
     },
     {
         "name": "Guadeloupe",
@@ -1458,7 +1458,7 @@ countries = [
         "capital": "Basse-Terre",
         "lat": 16.25,
         "lng": -61.583333,
-        "cca3": "GLP"
+        "cca3": "GLP",
     },
     {
         "name": "Cook Islands",
@@ -1468,7 +1468,7 @@ countries = [
         "capital": "Avarua",
         "lat": -21.23333333,
         "lng": -159.76666666,
-        "cca3": "COK"
+        "cca3": "COK",
     },
     {
         "name": "Comoros",
@@ -1478,7 +1478,7 @@ countries = [
         "capital": "Moroni",
         "lat": -12.16666666,
         "lng": 44.25,
-        "cca3": "COM"
+        "cca3": "COM",
     },
     {
         "name": "Colombia",
@@ -1488,7 +1488,7 @@ countries = [
         "capital": "Bogota",
         "lat": 4,
         "lng": -72,
-        "cca3": "COL"
+        "cca3": "COL",
     },
     {
         "name": "Nigeria",
@@ -1498,7 +1498,7 @@ countries = [
         "capital": "Abuja",
         "lat": 10,
         "lng": 8,
-        "cca3": "NGA"
+        "cca3": "NGA",
     },
     {
         "name": "Timor-Leste",
@@ -1508,7 +1508,7 @@ countries = [
         "capital": "Dili",
         "lat": -8.83333333,
         "lng": 125.91666666,
-        "cca3": "TLS"
+        "cca3": "TLS",
     },
     {
         "name": "Taiwan",
@@ -1518,7 +1518,7 @@ countries = [
         "capital": "Taipei",
         "lat": 23.5,
         "lng": 121,
-        "cca3": "TWN"
+        "cca3": "TWN",
     },
     {
         "name": "Portugal",
@@ -1528,7 +1528,7 @@ countries = [
         "capital": "Lisbon",
         "lat": 39.5,
         "lng": -8,
-        "cca3": "PRT"
+        "cca3": "PRT",
     },
     {
         "name": "Moldova",
@@ -1538,7 +1538,7 @@ countries = [
         "capital": "Chisinau",
         "lat": 47,
         "lng": 29,
-        "cca3": "MDA"
+        "cca3": "MDA",
     },
     {
         "name": "Guernsey",
@@ -1548,7 +1548,7 @@ countries = [
         "capital": "St. Peter Port",
         "lat": 49.46666666,
         "lng": -2.58333333,
-        "cca3": "GGY"
+        "cca3": "GGY",
     },
     {
         "name": "Madagascar",
@@ -1558,7 +1558,7 @@ countries = [
         "capital": "Antananarivo",
         "lat": -20,
         "lng": 47,
-        "cca3": "MDG"
+        "cca3": "MDG",
     },
     {
         "name": "Ecuador",
@@ -1568,7 +1568,7 @@ countries = [
         "capital": "Quito",
         "lat": -2,
         "lng": -77.5,
-        "cca3": "ECU"
+        "cca3": "ECU",
     },
     {
         "name": "Senegal",
@@ -1578,7 +1578,7 @@ countries = [
         "capital": "Dakar",
         "lat": 14,
         "lng": -14,
-        "cca3": "SEN"
+        "cca3": "SEN",
     },
     {
         "name": "New Zealand",
@@ -1588,7 +1588,7 @@ countries = [
         "capital": "Wellington",
         "lat": -41,
         "lng": 174,
-        "cca3": "NZL"
+        "cca3": "NZL",
     },
     {
         "name": "Maldives",
@@ -1598,7 +1598,7 @@ countries = [
         "capital": "Male",
         "lat": 3.25,
         "lng": 73,
-        "cca3": "MDV"
+        "cca3": "MDV",
     },
     {
         "name": "American Samoa",
@@ -1608,7 +1608,7 @@ countries = [
         "capital": "Pago Pago",
         "lat": -14.33333333,
         "lng": -170,
-        "cca3": "ASM"
+        "cca3": "ASM",
     },
     {
         "name": "Saint Pierre and Miquelon",
@@ -1618,7 +1618,7 @@ countries = [
         "capital": "Saint-Pierre",
         "lat": 46.83333333,
         "lng": -56.33333333,
-        "cca3": "SPM"
+        "cca3": "SPM",
     },
     {
         "name": "Curacao",
@@ -1628,7 +1628,7 @@ countries = [
         "capital": "Willemstad",
         "lat": 12.116667,
         "lng": -68.933333,
-        "cca3": "CUW"
+        "cca3": "CUW",
     },
     {
         "name": "France",
@@ -1638,7 +1638,7 @@ countries = [
         "capital": "Paris",
         "lat": 46,
         "lng": 2,
-        "cca3": "FRA"
+        "cca3": "FRA",
     },
     {
         "name": "Lithuania",
@@ -1648,7 +1648,7 @@ countries = [
         "capital": "Vilnius",
         "lat": 56,
         "lng": 24,
-        "cca3": "LTU"
+        "cca3": "LTU",
     },
     {
         "name": "Rwanda",
@@ -1658,7 +1658,7 @@ countries = [
         "capital": "Kigali",
         "lat": -2,
         "lng": 30,
-        "cca3": "RWA"
+        "cca3": "RWA",
     },
     {
         "name": "Zambia",
@@ -1668,7 +1668,7 @@ countries = [
         "capital": "Lusaka",
         "lat": -15,
         "lng": 30,
-        "cca3": "ZMB"
+        "cca3": "ZMB",
     },
     {
         "name": "Gambia",
@@ -1678,7 +1678,7 @@ countries = [
         "capital": "Banjul",
         "lat": 13.46666666,
         "lng": -16.56666666,
-        "cca3": "GMB"
+        "cca3": "GMB",
     },
     {
         "name": "Wallis and Futuna",
@@ -1688,7 +1688,7 @@ countries = [
         "capital": "Mata-Utu",
         "lat": -13.3,
         "lng": -176.2,
-        "cca3": "WLF"
+        "cca3": "WLF",
     },
     {
         "name": "Jersey",
@@ -1698,7 +1698,7 @@ countries = [
         "capital": "Saint Helier",
         "lat": 49.25,
         "lng": -2.16666666,
-        "cca3": "JEY"
+        "cca3": "JEY",
     },
     {
         "name": "Faroe Islands",
@@ -1708,7 +1708,7 @@ countries = [
         "capital": "Torshavn",
         "lat": 62,
         "lng": -7,
-        "cca3": "FRO"
+        "cca3": "FRO",
     },
     {
         "name": "Guatemala",
@@ -1718,7 +1718,7 @@ countries = [
         "capital": "Guatemala City",
         "lat": 15.5,
         "lng": -90.25,
-        "cca3": "GTM"
+        "cca3": "GTM",
     },
     {
         "name": "Denmark",
@@ -1728,7 +1728,7 @@ countries = [
         "capital": "Copenhagen",
         "lat": 56,
         "lng": 10,
-        "cca3": "DNK"
+        "cca3": "DNK",
     },
     {
         "name": "Isle of Man",
@@ -1738,7 +1738,7 @@ countries = [
         "capital": "Douglas",
         "lat": 54.25,
         "lng": -4.5,
-        "cca3": "IMN"
+        "cca3": "IMN",
     },
     {
         "name": "Australia",
@@ -1748,7 +1748,7 @@ countries = [
         "capital": "Canberra",
         "lat": -27,
         "lng": 133,
-        "cca3": "AUS"
+        "cca3": "AUS",
     },
     {
         "name": "Austria",
@@ -1758,7 +1758,7 @@ countries = [
         "capital": "Vienna",
         "lat": 47.33333333,
         "lng": 13.33333333,
-        "cca3": "AUT"
+        "cca3": "AUT",
     },
     {
         "name": "Svalbard and Jan Mayen",
@@ -1768,7 +1768,7 @@ countries = [
         "capital": "Longyearbyen",
         "lat": 78,
         "lng": 20,
-        "cca3": "SJM"
+        "cca3": "SJM",
     },
     {
         "name": "Venezuela",
@@ -1778,7 +1778,7 @@ countries = [
         "capital": "Caracas",
         "lat": 8,
         "lng": -66,
-        "cca3": "VEN"
+        "cca3": "VEN",
     },
     {
         "name": "Kosovo",
@@ -1788,7 +1788,7 @@ countries = [
         "capital": "Pristina",
         "lat": 42.666667,
         "lng": 21.166667,
-        "cca3": "UNK"
+        "cca3": "UNK",
     },
     {
         "name": "Palau",
@@ -1798,7 +1798,7 @@ countries = [
         "capital": "Ngerulmud",
         "lat": 7.5,
         "lng": 134.5,
-        "cca3": "PLW"
+        "cca3": "PLW",
     },
     {
         "name": "Kenya",
@@ -1808,7 +1808,7 @@ countries = [
         "capital": "Nairobi",
         "lat": 1,
         "lng": 38,
-        "cca3": "KEN"
+        "cca3": "KEN",
     },
     {
         "name": "Samoa",
@@ -1818,7 +1818,7 @@ countries = [
         "capital": "Apia",
         "lat": -13.58333333,
         "lng": -172.33333333,
-        "cca3": "WSM"
+        "cca3": "WSM",
     },
     {
         "name": "Turkey",
@@ -1828,7 +1828,7 @@ countries = [
         "capital": "Ankara",
         "lat": 39,
         "lng": 35,
-        "cca3": "TUR"
+        "cca3": "TUR",
     },
     {
         "name": "Albania",
@@ -1838,7 +1838,7 @@ countries = [
         "capital": "Tirana",
         "lat": 41,
         "lng": 20,
-        "cca3": "ALB"
+        "cca3": "ALB",
     },
     {
         "name": "Oman",
@@ -1848,7 +1848,7 @@ countries = [
         "capital": "Muscat",
         "lat": 21,
         "lng": 57,
-        "cca3": "OMN"
+        "cca3": "OMN",
     },
     {
         "name": "Tuvalu",
@@ -1858,7 +1858,7 @@ countries = [
         "capital": "Funafuti",
         "lat": -8,
         "lng": 178,
-        "cca3": "TUV"
+        "cca3": "TUV",
     },
     {
         "name": "Aland Islands",
@@ -1868,7 +1868,7 @@ countries = [
         "capital": "Mariehamn",
         "lat": 60.116667,
         "lng": 19.9,
-        "cca3": "ALA"
+        "cca3": "ALA",
     },
     {
         "name": "Brunei",
@@ -1878,7 +1878,7 @@ countries = [
         "capital": "Bandar Seri Begawan",
         "lat": 4.5,
         "lng": 114.66666666,
-        "cca3": "BRN"
+        "cca3": "BRN",
     },
     {
         "name": "Tunisia",
@@ -1888,7 +1888,7 @@ countries = [
         "capital": "Tunis",
         "lat": 34,
         "lng": 9,
-        "cca3": "TUN"
+        "cca3": "TUN",
     },
     {
         "name": "Pitcairn Islands",
@@ -1898,7 +1898,7 @@ countries = [
         "capital": "Adamstown",
         "lat": -25.06666666,
         "lng": -130.1,
-        "cca3": "PCN"
+        "cca3": "PCN",
     },
     {
         "name": "Barbados",
@@ -1908,7 +1908,7 @@ countries = [
         "capital": "Bridgetown",
         "lat": 13.16666666,
         "lng": -59.53333333,
-        "cca3": "BRB"
+        "cca3": "BRB",
     },
     {
         "name": "Brazil",
@@ -1918,7 +1918,7 @@ countries = [
         "capital": "Brasilia",
         "lat": -10,
         "lng": -55,
-        "cca3": "BRA"
+        "cca3": "BRA",
     },
     {
         "name": "Ivory Coast",
@@ -1928,7 +1928,7 @@ countries = [
         "capital": "Yamoussoukro",
         "lat": 8,
         "lng": -5,
-        "cca3": "CIV"
+        "cca3": "CIV",
     },
     {
         "name": "Serbia",
@@ -1938,7 +1938,7 @@ countries = [
         "capital": "Belgrade",
         "lat": 44,
         "lng": 21,
-        "cca3": "SRB"
+        "cca3": "SRB",
     },
     {
         "name": "Equatorial Guinea",
@@ -1948,7 +1948,7 @@ countries = [
         "capital": "Malabo",
         "lat": 2,
         "lng": 10,
-        "cca3": "GNQ"
+        "cca3": "GNQ",
     },
     {
         "name": "United States",
@@ -1958,7 +1958,7 @@ countries = [
         "capital": "Washington D.C.",
         "lat": 38,
         "lng": -97,
-        "cca3": "USA"
+        "cca3": "USA",
     },
     {
         "name": "Qatar",
@@ -1968,7 +1968,7 @@ countries = [
         "capital": "Doha",
         "lat": 25.5,
         "lng": 51.25,
-        "cca3": "QAT"
+        "cca3": "QAT",
     },
     {
         "name": "Sweden",
@@ -1978,7 +1978,7 @@ countries = [
         "capital": "Stockholm",
         "lat": 62,
         "lng": 15,
-        "cca3": "SWE"
+        "cca3": "SWE",
     },
     {
         "name": "Azerbaijan",
@@ -1988,7 +1988,7 @@ countries = [
         "capital": "Baku",
         "lat": 40.5,
         "lng": 47.5,
-        "cca3": "AZE"
+        "cca3": "AZE",
     },
     {
         "name": "Guinea-Bissau",
@@ -1998,7 +1998,7 @@ countries = [
         "capital": "Bissau",
         "lat": 12,
         "lng": -15,
-        "cca3": "GNB"
+        "cca3": "GNB",
     },
     {
         "name": "Swaziland",
@@ -2008,7 +2008,7 @@ countries = [
         "capital": "Lobamba",
         "lat": -26.5,
         "lng": 31.5,
-        "cca3": "SWZ"
+        "cca3": "SWZ",
     },
     {
         "name": "Tonga",
@@ -2018,7 +2018,7 @@ countries = [
         "capital": "Nuku'alofa",
         "lat": -20,
         "lng": -175,
-        "cca3": "TON"
+        "cca3": "TON",
     },
     {
         "name": "Canada",
@@ -2028,7 +2028,7 @@ countries = [
         "capital": "Ottawa",
         "lat": 60,
         "lng": -95,
-        "cca3": "CAN"
+        "cca3": "CAN",
     },
     {
         "name": "Ukraine",
@@ -2038,7 +2038,7 @@ countries = [
         "capital": "Kiev",
         "lat": 49,
         "lng": 32,
-        "cca3": "UKR"
+        "cca3": "UKR",
     },
     {
         "name": "South Korea",
@@ -2048,7 +2048,7 @@ countries = [
         "capital": "Seoul",
         "lat": 37,
         "lng": 127.5,
-        "cca3": "KOR"
+        "cca3": "KOR",
     },
     {
         "name": "Anguilla",
@@ -2058,7 +2058,7 @@ countries = [
         "capital": "The Valley",
         "lat": 18.25,
         "lng": -63.16666666,
-        "cca3": "AIA"
+        "cca3": "AIA",
     },
     {
         "name": "Central African Republic",
@@ -2068,7 +2068,7 @@ countries = [
         "capital": "Bangui",
         "lat": 7,
         "lng": 21,
-        "cca3": "CAF"
+        "cca3": "CAF",
     },
     {
         "name": "Slovakia",
@@ -2078,7 +2078,7 @@ countries = [
         "capital": "Bratislava",
         "lat": 48.66666666,
         "lng": 19.5,
-        "cca3": "SVK"
+        "cca3": "SVK",
     },
     {
         "name": "Cyprus",
@@ -2088,7 +2088,7 @@ countries = [
         "capital": "Nicosia",
         "lat": 35,
         "lng": 33,
-        "cca3": "CYP"
+        "cca3": "CYP",
     },
     {
         "name": "Bosnia and Herzegovina",
@@ -2098,7 +2098,7 @@ countries = [
         "capital": "Sarajevo",
         "lat": 44,
         "lng": 18,
-        "cca3": "BIH"
+        "cca3": "BIH",
     },
     {
         "name": "Singapore",
@@ -2108,7 +2108,7 @@ countries = [
         "capital": "Singapore",
         "lat": 1.36666666,
         "lng": 103.8,
-        "cca3": "SGP"
+        "cca3": "SGP",
     },
     {
         "name": "South Georgia",
@@ -2118,7 +2118,7 @@ countries = [
         "capital": "King Edward Point",
         "lat": -54.5,
         "lng": -37,
-        "cca3": "SGS"
+        "cca3": "SGS",
     },
     {
         "name": "Somalia",
@@ -2128,7 +2128,7 @@ countries = [
         "capital": "Mogadishu",
         "lat": 10,
         "lng": 49,
-        "cca3": "SOM"
+        "cca3": "SOM",
     },
     {
         "name": "Uzbekistan",
@@ -2138,7 +2138,7 @@ countries = [
         "capital": "Tashkent",
         "lat": 41,
         "lng": 64,
-        "cca3": "UZB"
+        "cca3": "UZB",
     },
     {
         "name": "Eritrea",
@@ -2148,7 +2148,7 @@ countries = [
         "capital": "Asmara",
         "lat": 15,
         "lng": 39,
-        "cca3": "ERI"
+        "cca3": "ERI",
     },
     {
         "name": "Poland",
@@ -2158,7 +2158,7 @@ countries = [
         "capital": "Warsaw",
         "lat": 52,
         "lng": 20,
-        "cca3": "POL"
+        "cca3": "POL",
     },
     {
         "name": "Kuwait",
@@ -2168,7 +2168,7 @@ countries = [
         "capital": "Kuwait City",
         "lat": 29.5,
         "lng": 45.75,
-        "cca3": "KWT"
+        "cca3": "KWT",
     },
     {
         "name": "Gabon",
@@ -2178,7 +2178,7 @@ countries = [
         "capital": "Libreville",
         "lat": -1,
         "lng": 11.75,
-        "cca3": "GAB"
+        "cca3": "GAB",
     },
     {
         "name": "Cayman Islands",
@@ -2188,7 +2188,7 @@ countries = [
         "capital": "George Town",
         "lat": 19.5,
         "lng": -80.5,
-        "cca3": "CYM"
+        "cca3": "CYM",
     },
     {
         "name": "Vatican City",
@@ -2198,7 +2198,7 @@ countries = [
         "capital": "Vatican City",
         "lat": 41.9,
         "lng": 12.45,
-        "cca3": "VAT"
+        "cca3": "VAT",
     },
     {
         "name": "Estonia",
@@ -2208,7 +2208,7 @@ countries = [
         "capital": "Tallinn",
         "lat": 59,
         "lng": 26,
-        "cca3": "EST"
+        "cca3": "EST",
     },
     {
         "name": "Malawi",
@@ -2218,7 +2218,7 @@ countries = [
         "capital": "Lilongwe",
         "lat": -13.5,
         "lng": 34,
-        "cca3": "MWI"
+        "cca3": "MWI",
     },
     {
         "name": "Spain",
@@ -2228,7 +2228,7 @@ countries = [
         "capital": "Madrid",
         "lat": 40,
         "lng": -4,
-        "cca3": "ESP"
+        "cca3": "ESP",
     },
     {
         "name": "Iraq",
@@ -2238,7 +2238,7 @@ countries = [
         "capital": "Baghdad",
         "lat": 33,
         "lng": 44,
-        "cca3": "IRQ"
+        "cca3": "IRQ",
     },
     {
         "name": "El Salvador",
@@ -2248,7 +2248,7 @@ countries = [
         "capital": "San Salvador",
         "lat": 13.83333333,
         "lng": -88.91666666,
-        "cca3": "SLV"
+        "cca3": "SLV",
     },
     {
         "name": "Mali",
@@ -2258,7 +2258,7 @@ countries = [
         "capital": "Bamako",
         "lat": 17,
         "lng": -4,
-        "cca3": "MLI"
+        "cca3": "MLI",
     },
     {
         "name": "Ireland",
@@ -2268,7 +2268,7 @@ countries = [
         "capital": "Dublin",
         "lat": 53,
         "lng": -8,
-        "cca3": "IRL"
+        "cca3": "IRL",
     },
     {
         "name": "Iran",
@@ -2278,7 +2278,7 @@ countries = [
         "capital": "Tehran",
         "lat": 32,
         "lng": 53,
-        "cca3": "IRN"
+        "cca3": "IRN",
     },
     {
         "name": "Aruba",
@@ -2288,7 +2288,7 @@ countries = [
         "capital": "Oranjestad",
         "lat": 12.5,
         "lng": -69.96666666,
-        "cca3": "ABW"
+        "cca3": "ABW",
     },
     {
         "name": "Papua New Guinea",
@@ -2298,7 +2298,7 @@ countries = [
         "capital": "Port Moresby",
         "lat": -6,
         "lng": 147,
-        "cca3": "PNG"
+        "cca3": "PNG",
     },
     {
         "name": "Panama",
@@ -2308,7 +2308,7 @@ countries = [
         "capital": "Panama City",
         "lat": 9,
         "lng": -80,
-        "cca3": "PAN"
+        "cca3": "PAN",
     },
     {
         "name": "Sudan",
@@ -2318,7 +2318,7 @@ countries = [
         "capital": "Khartoum",
         "lat": 15,
         "lng": 30,
-        "cca3": "SDN"
+        "cca3": "SDN",
     },
     {
         "name": "Solomon Islands",
@@ -2328,7 +2328,7 @@ countries = [
         "capital": "Honiara",
         "lat": -8,
         "lng": 159,
-        "cca3": "SLB"
+        "cca3": "SLB",
     },
     {
         "name": "Western Sahara",
@@ -2338,7 +2338,7 @@ countries = [
         "capital": "El Aaiun",
         "lat": 24.5,
         "lng": -13,
-        "cca3": "ESH"
+        "cca3": "ESH",
     },
     {
         "name": "Monaco",
@@ -2348,7 +2348,7 @@ countries = [
         "capital": "Monaco",
         "lat": 43.73333333,
         "lng": 7.4,
-        "cca3": "MCO"
+        "cca3": "MCO",
     },
     {
         "name": "Italy",
@@ -2358,7 +2358,7 @@ countries = [
         "capital": "Rome",
         "lat": 42.83333333,
         "lng": 12.83333333,
-        "cca3": "ITA"
+        "cca3": "ITA",
     },
     {
         "name": "Japan",
@@ -2368,7 +2368,7 @@ countries = [
         "capital": "Tokyo",
         "lat": 36,
         "lng": 138,
-        "cca3": "JPN"
+        "cca3": "JPN",
     },
     {
         "name": "Kyrgyzstan",
@@ -2378,7 +2378,7 @@ countries = [
         "capital": "Bishkek",
         "lat": 41,
         "lng": 75,
-        "cca3": "KGZ"
+        "cca3": "KGZ",
     },
     {
         "name": "Uganda",
@@ -2388,7 +2388,7 @@ countries = [
         "capital": "Kampala",
         "lat": 1,
         "lng": 32,
-        "cca3": "UGA"
+        "cca3": "UGA",
     },
     {
         "name": "New Caledonia",
@@ -2398,7 +2398,7 @@ countries = [
         "capital": "Noumea",
         "lat": -21.5,
         "lng": 165.5,
-        "cca3": "NCL"
+        "cca3": "NCL",
     },
     {
         "name": "United Arab Emirates",
@@ -2408,7 +2408,7 @@ countries = [
         "capital": "Abu Dhabi",
         "lat": 24,
         "lng": 54,
-        "cca3": "ARE"
+        "cca3": "ARE",
     },
     {
         "name": "Argentina",
@@ -2418,7 +2418,7 @@ countries = [
         "capital": "Buenos Aires",
         "lat": -34,
         "lng": -64,
-        "cca3": "ARG"
+        "cca3": "ARG",
     },
     {
         "name": "Bahamas",
@@ -2428,7 +2428,7 @@ countries = [
         "capital": "Nassau",
         "lat": 24.25,
         "lng": -76,
-        "cca3": "BHS"
+        "cca3": "BHS",
     },
     {
         "name": "Bahrain",
@@ -2438,7 +2438,7 @@ countries = [
         "capital": "Manama",
         "lat": 26,
         "lng": 50.55,
-        "cca3": "BHR"
+        "cca3": "BHR",
     },
     {
         "name": "Armenia",
@@ -2448,7 +2448,7 @@ countries = [
         "capital": "Yerevan",
         "lat": 40,
         "lng": 45,
-        "cca3": "ARM"
+        "cca3": "ARM",
     },
     {
         "name": "Nauru",
@@ -2458,7 +2458,7 @@ countries = [
         "capital": "Yaren",
         "lat": -0.53333333,
         "lng": 166.91666666,
-        "cca3": "NRU"
+        "cca3": "NRU",
     },
     {
         "name": "Cuba",
@@ -2468,12 +2468,12 @@ countries = [
         "capital": "Havana",
         "lat": 21.5,
         "lng": -80,
-        "cca3": "CUB"
-    }
+        "cca3": "CUB",
+    },
 ]
 
 all_lookups = {}
-lookups = ['cioc', 'cca2', 'cca3', 'name']
+lookups = ["cioc", "cca2", "cca3", "name"]
 for lookup in lookups:
     all_lookups[lookup] = {}
     for country in countries:
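For context: the loop touched by the hunk above builds a per-field index over the countries list so a record can be fetched by any of the four codes. The inner assignment falls outside the hunk's context lines, so the key normalization below is an assumption rather than something this diff shows; a minimal sketch of the resulting structure:

    all_lookups = {}
    lookups = ["cioc", "cca2", "cca3", "name"]
    for lookup in lookups:
        all_lookups[lookup] = {}
        for country in countries:
            # assumed normalization; the real inner loop body is not shown above
            all_lookups[lookup][str(country[lookup]).lower()] = country

    # e.g. all_lookups["cca3"]["cub"] returns the Cuba record defined above
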
diff --git a/superset/data/country_map.py b/superset/data/country_map.py
index 6b32abe..8076fd5 100644
--- a/superset/data/country_map.py
+++ b/superset/data/country_map.py
@@ -18,38 +18,39 @@ from .helpers import (
 
 def load_country_map_data():
     """Loading data for map with country map"""
-    csv_path = os.path.join(DATA_FOLDER, 'birth_france_data_for_country_map.csv')
-    data = pd.read_csv(csv_path, encoding='utf-8')
-    data['dttm'] = datetime.datetime.now().date()
+    csv_path = os.path.join(DATA_FOLDER, "birth_france_data_for_country_map.csv")
+    data = pd.read_csv(csv_path, encoding="utf-8")
+    data["dttm"] = datetime.datetime.now().date()
     data.to_sql(  # pylint: disable=no-member
-        'birth_france_by_region',
+        "birth_france_by_region",
         db.engine,
-        if_exists='replace',
+        if_exists="replace",
         chunksize=500,
         dtype={
-            'DEPT_ID': String(10),
-            '2003': BigInteger,
-            '2004': BigInteger,
-            '2005': BigInteger,
-            '2006': BigInteger,
-            '2007': BigInteger,
-            '2008': BigInteger,
-            '2009': BigInteger,
-            '2010': BigInteger,
-            '2011': BigInteger,
-            '2012': BigInteger,
-            '2013': BigInteger,
-            '2014': BigInteger,
-            'dttm': Date(),
+            "DEPT_ID": String(10),
+            "2003": BigInteger,
+            "2004": BigInteger,
+            "2005": BigInteger,
+            "2006": BigInteger,
+            "2007": BigInteger,
+            "2008": BigInteger,
+            "2009": BigInteger,
+            "2010": BigInteger,
+            "2011": BigInteger,
+            "2012": BigInteger,
+            "2013": BigInteger,
+            "2014": BigInteger,
+            "dttm": Date(),
         },
-        index=False)
-    print('Done loading table!')
-    print('-' * 80)
-    print('Creating table reference')
-    obj = db.session.query(TBL).filter_by(table_name='birth_france_by_region').first()
+        index=False,
+    )
+    print("Done loading table!")
+    print("-" * 80)
+    print("Creating table reference")
+    obj = db.session.query(TBL).filter_by(table_name="birth_france_by_region").first()
     if not obj:
-        obj = TBL(table_name='birth_france_by_region')
-    obj.main_dttm_col = 'dttm'
+        obj = TBL(table_name="birth_france_by_region")
+    obj.main_dttm_col = "dttm"
     obj.database = utils.get_or_create_main_db()
     db.session.merge(obj)
     db.session.commit()
@@ -57,21 +58,21 @@ def load_country_map_data():
     tbl = obj
 
     slice_data = {
-        'granularity_sqla': '',
-        'since': '',
-        'until': '',
-        'where': '',
-        'viz_type': 'country_map',
-        'entity': 'DEPT_ID',
-        'metric': 'avg__2004',
-        'row_limit': 500000,
+        "granularity_sqla": "",
+        "since": "",
+        "until": "",
+        "where": "",
+        "viz_type": "country_map",
+        "entity": "DEPT_ID",
+        "metric": "avg__2004",
+        "row_limit": 500000,
     }
 
-    print('Creating a slice')
+    print("Creating a slice")
     slc = Slice(
-        slice_name='Birth in France by department in 2016',
-        viz_type='country_map',
-        datasource_type='table',
+        slice_name="Birth in France by department in 2016",
+        viz_type="country_map",
+        datasource_type="table",
         datasource_id=tbl.id,
         params=get_slice_json(slice_data),
     )
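The country_map.py hunks above are representative of the whole commit: black swaps single quotes for double quotes, explodes call arguments one per line when they no longer fit, and adds a trailing ("magic") comma before a closing parenthesis that now sits on its own line. A before/after sketch with illustrative values, paraphrasing the to_sql call above rather than quoting any file verbatim:

    # Before black:
    data.to_sql(
        'birth_france_by_region',
        db.engine,
        if_exists='replace',
        chunksize=500,
        index=False)

    # After black: double quotes, one argument per line, trailing comma,
    # and the closing parenthesis moved to its own line.
    data.to_sql(
        "birth_france_by_region",
        db.engine,
        if_exists="replace",
        chunksize=500,
        index=False,
    )
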
diff --git a/superset/data/css_templates.py b/superset/data/css_templates.py
index a991736..d846e74 100644
--- a/superset/data/css_templates.py
+++ b/superset/data/css_templates.py
@@ -6,12 +6,13 @@ from superset.models.core import CssTemplate
 
 def load_css_templates():
     """Loads 2 css templates to demonstrate the feature"""
-    print('Creating default CSS templates')
+    print("Creating default CSS templates")
 
-    obj = db.session.query(CssTemplate).filter_by(template_name='Flat').first()
+    obj = db.session.query(CssTemplate).filter_by(template_name="Flat").first()
     if not obj:
-        obj = CssTemplate(template_name='Flat')
-    css = textwrap.dedent("""\
+        obj = CssTemplate(template_name="Flat")
+    css = textwrap.dedent(
+        """\
     .gridster div.widget {
         transition: background-color 0.5s ease;
         background-color: #FAFAFA;
@@ -42,16 +43,17 @@ def load_css_templates():
         '#ff3339', '#ff1ab1', '#005c66', '#00b3a5', '#55d12e', '#b37e00', '#988b4e',
      ];
     */
-    """)
+    """
+    )
     obj.css = css
     db.session.merge(obj)
     db.session.commit()
 
-    obj = (
-        db.session.query(CssTemplate).filter_by(template_name='Courier Black').first())
+    obj = db.session.query(CssTemplate).filter_by(template_name="Courier Black").first()
     if not obj:
-        obj = CssTemplate(template_name='Courier Black')
-    css = textwrap.dedent("""\
+        obj = CssTemplate(template_name="Courier Black")
+    css = textwrap.dedent(
+        """\
     .gridster div.widget {
         transition: background-color 0.5s ease;
         background-color: #EEE;
@@ -97,7 +99,8 @@ def load_css_templates():
         '#ff3339', '#ff1ab1', '#005c66', '#00b3a5', '#55d12e', '#b37e00', '#988b4e',
      ];
     */
-    """)
+    """
+    )
     obj.css = css
     db.session.merge(obj)
     db.session.commit()
diff --git a/superset/data/deck.py b/superset/data/deck.py
index 3307e9e..0bb4499 100644
--- a/superset/data/deck.py
+++ b/superset/data/deck.py
@@ -2,21 +2,9 @@
 import json
 
 from superset import db
-from .helpers import (
-    Dash,
-    get_slice_json,
-    merge_slice,
-    Slice,
-    TBL,
-    update_slice_ids,
-)
+from .helpers import Dash, get_slice_json, merge_slice, Slice, TBL, update_slice_ids
 
-COLOR_RED = {
-    'r': 205,
-    'g': 0,
-    'b': 3,
-    'a': 0.82,
-}
+COLOR_RED = {"r": 205, "g": 0, "b": 3, "a": 0.82}
 POSITION_JSON = """\
 {
     "CHART-3afd9d70": {
@@ -161,46 +149,42 @@ POSITION_JSON = """\
 
 
 def load_deck_dash():
-    print('Loading deck.gl dashboard')
+    print("Loading deck.gl dashboard")
     slices = []
-    tbl = db.session.query(TBL).filter_by(table_name='long_lat').first()
+    tbl = db.session.query(TBL).filter_by(table_name="long_lat").first()
     slice_data = {
-        'spatial': {
-            'type': 'latlong',
-            'lonCol': 'LON',
-            'latCol': 'LAT',
-        },
-        'color_picker': COLOR_RED,
-        'datasource': '5__table',
-        'filters': [],
-        'granularity_sqla': None,
-        'groupby': [],
-        'having': '',
-        'mapbox_style': 'mapbox://styles/mapbox/light-v9',
-        'multiplier': 10,
-        'point_radius_fixed': {'type': 'metric', 'value': 'count'},
-        'point_unit': 'square_m',
-        'min_radius': 1,
-        'row_limit': 5000,
-        'time_range': ' : ',
-        'size': 'count',
-        'time_grain_sqla': None,
-        'viewport': {
-            'bearing': -4.952916738791771,
-            'latitude': 37.78926922909199,
-            'longitude': -122.42613341901688,
-            'pitch': 4.750411100577438,
-            'zoom': 12.729132798697304,
-        },
-        'viz_type': 'deck_scatter',
-        'where': '',
+        "spatial": {"type": "latlong", "lonCol": "LON", "latCol": "LAT"},
+        "color_picker": COLOR_RED,
+        "datasource": "5__table",
+        "filters": [],
+        "granularity_sqla": None,
+        "groupby": [],
+        "having": "",
+        "mapbox_style": "mapbox://styles/mapbox/light-v9",
+        "multiplier": 10,
+        "point_radius_fixed": {"type": "metric", "value": "count"},
+        "point_unit": "square_m",
+        "min_radius": 1,
+        "row_limit": 5000,
+        "time_range": " : ",
+        "size": "count",
+        "time_grain_sqla": None,
+        "viewport": {
+            "bearing": -4.952916738791771,
+            "latitude": 37.78926922909199,
+            "longitude": -122.42613341901688,
+            "pitch": 4.750411100577438,
+            "zoom": 12.729132798697304,
+        },
+        "viz_type": "deck_scatter",
+        "where": "",
     }
 
-    print('Creating Scatterplot slice')
+    print("Creating Scatterplot slice")
     slc = Slice(
-        slice_name='Scatterplot',
-        viz_type='deck_scatter',
-        datasource_type='table',
+        slice_name="Scatterplot",
+        viz_type="deck_scatter",
+        datasource_type="table",
         datasource_id=tbl.id,
         params=get_slice_json(slice_data),
     )
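Every slice-creation hunk in deck.py follows the same shape once black is done with it: build a slice_data dict, wrap it in a Slice, and register it. A minimal sketch of that recurring pattern, using the helpers imported at the top of the file; treating get_slice_json as a json.dumps-style serializer of the params dict, and merge_slice as an upsert by slice name, are assumptions here, not facts shown in this diff:

    from superset import db
    from .helpers import Slice, TBL, get_slice_json, merge_slice

    def make_demo_slice(slice_name, viz_type, table_name, slice_data):
        # Hypothetical helper mirroring the pattern reformatted above.
        tbl = db.session.query(TBL).filter_by(table_name=table_name).first()
        slc = Slice(
            slice_name=slice_name,
            viz_type=viz_type,
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(slice_data),  # serialize the form-data dict
        )
        merge_slice(slc)  # assumed to upsert an existing slice of the same name
        return slc
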
@@ -208,46 +192,37 @@ def load_deck_dash():
     slices.append(slc)
 
     slice_data = {
-        'point_unit': 'square_m',
-        'filters': [],
-        'row_limit': 5000,
-        'spatial': {
-            'type': 'latlong',
-            'lonCol': 'LON',
-            'latCol': 'LAT',
-        },
-        'mapbox_style': 'mapbox://styles/mapbox/dark-v9',
-        'granularity_sqla': None,
-        'size': 'count',
-        'viz_type': 'deck_screengrid',
-        'time_range': 'No filter',
-        'point_radius': 'Auto',
-        'color_picker': {
-            'a': 1,
-            'r': 14,
-            'b': 0,
-            'g': 255,
-        },
-        'grid_size': 20,
-        'where': '',
-        'having': '',
-        'viewport': {
-            'zoom': 14.161641703941438,
-            'longitude': -122.41827069521386,
-            'bearing': -4.952916738791771,
-            'latitude': 37.76024135844065,
-            'pitch': 4.750411100577438,
-        },
-        'point_radius_fixed': {'type': 'fix', 'value': 2000},
-        'datasource': '5__table',
-        'time_grain_sqla': None,
-        'groupby': [],
+        "point_unit": "square_m",
+        "filters": [],
+        "row_limit": 5000,
+        "spatial": {"type": "latlong", "lonCol": "LON", "latCol": "LAT"},
+        "mapbox_style": "mapbox://styles/mapbox/dark-v9",
+        "granularity_sqla": None,
+        "size": "count",
+        "viz_type": "deck_screengrid",
+        "time_range": "No filter",
+        "point_radius": "Auto",
+        "color_picker": {"a": 1, "r": 14, "b": 0, "g": 255},
+        "grid_size": 20,
+        "where": "",
+        "having": "",
+        "viewport": {
+            "zoom": 14.161641703941438,
+            "longitude": -122.41827069521386,
+            "bearing": -4.952916738791771,
+            "latitude": 37.76024135844065,
+            "pitch": 4.750411100577438,
+        },
+        "point_radius_fixed": {"type": "fix", "value": 2000},
+        "datasource": "5__table",
+        "time_grain_sqla": None,
+        "groupby": [],
     }
-    print('Creating Screen Grid slice')
+    print("Creating Screen Grid slice")
     slc = Slice(
-        slice_name='Screen grid',
-        viz_type='deck_screengrid',
-        datasource_type='table',
+        slice_name="Screen grid",
+        viz_type="deck_screengrid",
+        datasource_type="table",
         datasource_id=tbl.id,
         params=get_slice_json(slice_data),
     )
@@ -255,47 +230,38 @@ def load_deck_dash():
     slices.append(slc)
 
     slice_data = {
-        'spatial': {
-            'type': 'latlong',
-            'lonCol': 'LON',
-            'latCol': 'LAT',
-        },
-        'filters': [],
-        'row_limit': 5000,
-        'mapbox_style': 'mapbox://styles/mapbox/streets-v9',
-        'granularity_sqla': None,
-        'size': 'count',
-        'viz_type': 'deck_hex',
-        'time_range': 'No filter',
-        'point_radius_unit': 'Pixels',
-        'point_radius': 'Auto',
-        'color_picker': {
-            'a': 1,
-            'r': 14,
-            'b': 0,
-            'g': 255,
-        },
-        'grid_size': 40,
-        'extruded': True,
-        'having': '',
-        'viewport': {
-            'latitude': 37.789795085160335,
-            'pitch': 54.08961642447763,
-            'zoom': 13.835465702403654,
-            'longitude': -122.40632230075536,
-            'bearing': -2.3984797349335167,
-        },
-        'where': '',
-        'point_radius_fixed': {'type': 'fix', 'value': 2000},
-        'datasource': '5__table',
-        'time_grain_sqla': None,
-        'groupby': [],
+        "spatial": {"type": "latlong", "lonCol": "LON", "latCol": "LAT"},
+        "filters": [],
+        "row_limit": 5000,
+        "mapbox_style": "mapbox://styles/mapbox/streets-v9",
+        "granularity_sqla": None,
+        "size": "count",
+        "viz_type": "deck_hex",
+        "time_range": "No filter",
+        "point_radius_unit": "Pixels",
+        "point_radius": "Auto",
+        "color_picker": {"a": 1, "r": 14, "b": 0, "g": 255},
+        "grid_size": 40,
+        "extruded": True,
+        "having": "",
+        "viewport": {
+            "latitude": 37.789795085160335,
+            "pitch": 54.08961642447763,
+            "zoom": 13.835465702403654,
+            "longitude": -122.40632230075536,
+            "bearing": -2.3984797349335167,
+        },
+        "where": "",
+        "point_radius_fixed": {"type": "fix", "value": 2000},
+        "datasource": "5__table",
+        "time_grain_sqla": None,
+        "groupby": [],
     }
-    print('Creating Hex slice')
+    print("Creating Hex slice")
     slc = Slice(
-        slice_name='Hexagons',
-        viz_type='deck_hex',
-        datasource_type='table',
+        slice_name="Hexagons",
+        viz_type="deck_hex",
+        datasource_type="table",
         datasource_id=tbl.id,
         params=get_slice_json(slice_data),
     )
@@ -303,119 +269,97 @@ def load_deck_dash():
     slices.append(slc)
 
     slice_data = {
-        'spatial': {
-            'type': 'latlong',
-            'lonCol': 'LON',
-            'latCol': 'LAT',
-        },
-        'filters': [],
-        'row_limit': 5000,
-        'mapbox_style': 'mapbox://styles/mapbox/satellite-streets-v9',
-        'granularity_sqla': None,
-        'size': 'count',
-        'viz_type': 'deck_grid',
-        'point_radius_unit': 'Pixels',
-        'point_radius': 'Auto',
-        'time_range': 'No filter',
-        'color_picker': {
-            'a': 1,
-            'r': 14,
-            'b': 0,
-            'g': 255,
-        },
-        'grid_size': 120,
-        'extruded': True,
-        'having': '',
-        'viewport': {
-            'longitude': -122.42066918995666,
-            'bearing': 155.80099696026355,
-            'zoom': 12.699690845482069,
-            'latitude': 37.7942314882596,
-            'pitch': 53.470800300695146,
-        },
-        'where': '',
-        'point_radius_fixed': {'type': 'fix', 'value': 2000},
-        'datasource': '5__table',
-        'time_grain_sqla': None,
-        'groupby': [],
+        "spatial": {"type": "latlong", "lonCol": "LON", "latCol": "LAT"},
+        "filters": [],
+        "row_limit": 5000,
+        "mapbox_style": "mapbox://styles/mapbox/satellite-streets-v9",
+        "granularity_sqla": None,
+        "size": "count",
+        "viz_type": "deck_grid",
+        "point_radius_unit": "Pixels",
+        "point_radius": "Auto",
+        "time_range": "No filter",
+        "color_picker": {"a": 1, "r": 14, "b": 0, "g": 255},
+        "grid_size": 120,
+        "extruded": True,
+        "having": "",
+        "viewport": {
+            "longitude": -122.42066918995666,
+            "bearing": 155.80099696026355,
+            "zoom": 12.699690845482069,
+            "latitude": 37.7942314882596,
+            "pitch": 53.470800300695146,
+        },
+        "where": "",
+        "point_radius_fixed": {"type": "fix", "value": 2000},
+        "datasource": "5__table",
+        "time_grain_sqla": None,
+        "groupby": [],
     }
-    print('Creating Grid slice')
+    print("Creating Grid slice")
     slc = Slice(
-        slice_name='Grid',
-        viz_type='deck_grid',
-        datasource_type='table',
+        slice_name="Grid",
+        viz_type="deck_grid",
+        datasource_type="table",
         datasource_id=tbl.id,
         params=get_slice_json(slice_data),
     )
     merge_slice(slc)
     slices.append(slc)
 
-    polygon_tbl = db.session.query(TBL) \
-                    .filter_by(table_name='sf_population_polygons').first()
+    polygon_tbl = (
+        db.session.query(TBL).filter_by(table_name="sf_population_polygons").first()
+    )
     slice_data = {
-        'datasource': '11__table',
-        'viz_type': 'deck_polygon',
-        'slice_id': 41,
-        'granularity_sqla': None,
-        'time_grain_sqla': None,
-        'time_range': ' : ',
-        'line_column': 'contour',
-        'metric': None,
-        'line_type': 'json',
-        'mapbox_style': 'mapbox://styles/mapbox/light-v9',
-        'viewport': {
-            'longitude': -122.43388541747726,
-            'latitude': 37.752020331384834,
-            'zoom': 11.133995608594631,
-            'bearing': 37.89506450385642,
-            'pitch': 60,
-            'width': 667,
-            'height': 906,
-            'altitude': 1.5,
-            'maxZoom': 20,
-            'minZoom': 0,
-            'maxPitch': 60,
-            'minPitch': 0,
-            'maxLatitude': 85.05113,
-            'minLatitude': -85.05113,
-        },
-        'reverse_long_lat': False,
-        'fill_color_picker': {
-            'r': 3,
-            'g': 65,
-            'b': 73,
-            'a': 1,
-        },
-        'stroke_color_picker': {
-            'r': 0,
-            'g': 122,
-            'b': 135,
-            'a': 1,
-        },
-        'filled': True,
-        'stroked': False,
-        'extruded': True,
-        'point_radius_scale': 100,
-        'js_columns': [
-            'population',
-            'area',
-        ],
-        'js_datapoint_mutator':
-            '(d) => {\n    d.elevation = d.extraProps.population/d.extraProps.area/10\n \
+        "datasource": "11__table",
+        "viz_type": "deck_polygon",
+        "slice_id": 41,
+        "granularity_sqla": None,
+        "time_grain_sqla": None,
+        "time_range": " : ",
+        "line_column": "contour",
+        "metric": None,
+        "line_type": "json",
+        "mapbox_style": "mapbox://styles/mapbox/light-v9",
+        "viewport": {
+            "longitude": -122.43388541747726,
+            "latitude": 37.752020331384834,
+            "zoom": 11.133995608594631,
+            "bearing": 37.89506450385642,
+            "pitch": 60,
+            "width": 667,
+            "height": 906,
+            "altitude": 1.5,
+            "maxZoom": 20,
+            "minZoom": 0,
+            "maxPitch": 60,
+            "minPitch": 0,
+            "maxLatitude": 85.05113,
+            "minLatitude": -85.05113,
+        },
+        "reverse_long_lat": False,
+        "fill_color_picker": {"r": 3, "g": 65, "b": 73, "a": 1},
+        "stroke_color_picker": {"r": 0, "g": 122, "b": 135, "a": 1},
+        "filled": True,
+        "stroked": False,
+        "extruded": True,
+        "point_radius_scale": 100,
+        "js_columns": ["population", "area"],
+        "js_datapoint_mutator": "(d) => {\n    d.elevation = d.extraProps.population/d.extraProps.area/10\n \
          d.fillColor = [d.extraProps.population/d.extraProps.area/60,140,0]\n \
-         return d;\n}',
-        'js_tooltip': '',
-        'js_onclick_href': '',
-        'where': '',
-        'having': '',
-        'filters': [],
+         return d;\n}",
+        "js_tooltip": "",
+        "js_onclick_href": "",
+        "where": "",
+        "having": "",
+        "filters": [],
     }
 
-    print('Creating Polygon slice')
+    print("Creating Polygon slice")
     slc = Slice(
-        slice_name='Polygons',
-        viz_type='deck_polygon',
-        datasource_type='table',
+        slice_name="Polygons",
+        viz_type="deck_polygon",
+        datasource_type="table",
         datasource_id=polygon_tbl.id,
         params=get_slice_json(slice_data),
     )
@@ -423,123 +367,114 @@ def load_deck_dash():
     slices.append(slc)
 
     slice_data = {
-        'datasource': '10__table',
-        'viz_type': 'deck_arc',
-        'slice_id': 42,
-        'granularity_sqla': None,
-        'time_grain_sqla': None,
-        'time_range': ' : ',
-        'start_spatial': {
-            'type': 'latlong',
-            'latCol': 'LATITUDE',
-            'lonCol': 'LONGITUDE',
-        },
-        'end_spatial': {
-            'type': 'latlong',
-            'latCol': 'LATITUDE_DEST',
-            'lonCol': 'LONGITUDE_DEST',
-        },
-        'row_limit': 5000,
-        'mapbox_style': 'mapbox://styles/mapbox/light-v9',
-        'viewport': {
-            'altitude': 1.5,
-            'bearing': 8.546256357301871,
-            'height': 642,
-            'latitude': 44.596651438714254,
-            'longitude': -91.84340711201104,
-            'maxLatitude': 85.05113,
-            'maxPitch': 60,
-            'maxZoom': 20,
-            'minLatitude': -85.05113,
-            'minPitch': 0,
-            'minZoom': 0,
-            'pitch': 60,
-            'width': 997,
-            'zoom': 2.929837070560775,
-        },
-        'color_picker': {
-            'r': 0,
-            'g': 122,
-            'b': 135,
-            'a': 1,
-        },
-        'stroke_width': 1,
-        'where': '',
-        'having': '',
-        'filters': [],
+        "datasource": "10__table",
+        "viz_type": "deck_arc",
+        "slice_id": 42,
+        "granularity_sqla": None,
+        "time_grain_sqla": None,
+        "time_range": " : ",
+        "start_spatial": {
+            "type": "latlong",
+            "latCol": "LATITUDE",
+            "lonCol": "LONGITUDE",
+        },
+        "end_spatial": {
+            "type": "latlong",
+            "latCol": "LATITUDE_DEST",
+            "lonCol": "LONGITUDE_DEST",
+        },
+        "row_limit": 5000,
+        "mapbox_style": "mapbox://styles/mapbox/light-v9",
+        "viewport": {
+            "altitude": 1.5,
+            "bearing": 8.546256357301871,
+            "height": 642,
+            "latitude": 44.596651438714254,
+            "longitude": -91.84340711201104,
+            "maxLatitude": 85.05113,
+            "maxPitch": 60,
+            "maxZoom": 20,
+            "minLatitude": -85.05113,
+            "minPitch": 0,
+            "minZoom": 0,
+            "pitch": 60,
+            "width": 997,
+            "zoom": 2.929837070560775,
+        },
+        "color_picker": {"r": 0, "g": 122, "b": 135, "a": 1},
+        "stroke_width": 1,
+        "where": "",
+        "having": "",
+        "filters": [],
     }
 
-    print('Creating Arc slice')
+    print("Creating Arc slice")
     slc = Slice(
-        slice_name='Arcs',
-        viz_type='deck_arc',
-        datasource_type='table',
-        datasource_id=db.session.query(TBL).filter_by(table_name='flights').first().id,
+        slice_name="Arcs",
+        viz_type="deck_arc",
+        datasource_type="table",
+        datasource_id=db.session.query(TBL).filter_by(table_name="flights").first().id,
         params=get_slice_json(slice_data),
     )
     merge_slice(slc)
     slices.append(slc)
 
     slice_data = {
-        'datasource': '12__table',
-        'slice_id': 43,
-        'viz_type': 'deck_path',
-        'time_grain_sqla': None,
-        'time_range': ' : ',
-        'line_column': 'path_json',
-        'line_type': 'json',
-        'row_limit': 5000,
-        'mapbox_style': 'mapbox://styles/mapbox/light-v9',
-        'viewport': {
-            'longitude': -122.18885402582598,
-            'latitude': 37.73671752604488,
-            'zoom': 9.51847667620428,
-            'bearing': 0,
-            'pitch': 0,
-            'width': 669,
-            'height': 1094,
-            'altitude': 1.5,
-            'maxZoom': 20,
-            'minZoom': 0,
-            'maxPitch': 60,
-            'minPitch': 0,
-            'maxLatitude': 85.05113,
-            'minLatitude': -85.05113,
-        },
-        'color_picker': {
-            'r': 0,
-            'g': 122,
-            'b': 135,
-            'a': 1,
-        },
-        'line_width': 150,
-        'reverse_long_lat': False,
-        'js_columns': [
-            'color',
-        ],
-        'js_datapoint_mutator': 'd => {\n    return {\n        ...d,\n        color: \
-            colors.hexToRGB(d.extraProps.color),\n    }\n}',
-        'js_tooltip': '',
-        'js_onclick_href': '',
-        'where': '',
-        'having': '',
-        'filters': [],
+        "datasource": "12__table",
+        "slice_id": 43,
+        "viz_type": "deck_path",
+        "time_grain_sqla": None,
+        "time_range": " : ",
+        "line_column": "path_json",
+        "line_type": "json",
+        "row_limit": 5000,
+        "mapbox_style": "mapbox://styles/mapbox/light-v9",
+        "viewport": {
+            "longitude": -122.18885402582598,
+            "latitude": 37.73671752604488,
+            "zoom": 9.51847667620428,
+            "bearing": 0,
+            "pitch": 0,
+            "width": 669,
+            "height": 1094,
+            "altitude": 1.5,
+            "maxZoom": 20,
+            "minZoom": 0,
+            "maxPitch": 60,
+            "minPitch": 0,
+            "maxLatitude": 85.05113,
+            "minLatitude": -85.05113,
+        },
+        "color_picker": {"r": 0, "g": 122, "b": 135, "a": 1},
+        "line_width": 150,
+        "reverse_long_lat": False,
+        "js_columns": ["color"],
+        "js_datapoint_mutator": "d => {\n    return {\n        ...d,\n        color: \
+            colors.hexToRGB(d.extraProps.color),\n    }\n}",
+        "js_tooltip": "",
+        "js_onclick_href": "",
+        "where": "",
+        "having": "",
+        "filters": [],
     }
 
-    print('Creating Path slice')
+    print("Creating Path slice")
     slc = Slice(
-        slice_name='Path',
-        viz_type='deck_path',
-        datasource_type='table',
-        datasource_id=db.session.query(TBL).filter_by(table_name='bart_lines').first().id,
+        slice_name="Path",
+        viz_type="deck_path",
+        datasource_type="table",
+        datasource_id=db.session.query(TBL)
+        .filter_by(table_name="bart_lines")
+        .first()
+        .id,
         params=get_slice_json(slice_data),
     )
     merge_slice(slc)
     slices.append(slc)
-    slug = 'deck'
+    slug = "deck"
 
-    print('Creating a dashboard')
-    title = 'deck.gl Demo'
+    print("Creating a dashboard")
+    title = "deck.gl Demo"
     dash = db.session.query(Dash).filter_by(slug=slug).first()
 
     if not dash:
@@ -555,5 +490,5 @@ def load_deck_dash():
     db.session.commit()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     load_deck_dash()
diff --git a/superset/data/energy.py b/superset/data/energy.py
index bbb4fba..0582ee4 100644
--- a/superset/data/energy.py
+++ b/superset/data/energy.py
@@ -14,37 +14,35 @@ from .helpers import DATA_FOLDER, merge_slice, misc_dash_slices, Slice, TBL
 
 def load_energy():
     """Loads an energy related dataset to use with sankey and graphs"""
-    tbl_name = 'energy_usage'
-    with gzip.open(os.path.join(DATA_FOLDER, 'energy.json.gz')) as f:
+    tbl_name = "energy_usage"
+    with gzip.open(os.path.join(DATA_FOLDER, "energy.json.gz")) as f:
         pdf = pd.read_json(f)
     pdf.to_sql(
         tbl_name,
         db.engine,
-        if_exists='replace',
+        if_exists="replace",
         chunksize=500,
-        dtype={
-            'source': String(255),
-            'target': String(255),
-            'value': Float(),
-        },
-        index=False)
+        dtype={"source": String(255), "target": String(255), "value": Float()},
+        index=False,
+    )
 
-    print('Creating table [wb_health_population] reference')
+    print("Creating table [wb_health_population] reference")
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
     if not tbl:
         tbl = TBL(table_name=tbl_name)
-    tbl.description = 'Energy consumption'
+    tbl.description = "Energy consumption"
     tbl.database = utils.get_or_create_main_db()
     db.session.merge(tbl)
     db.session.commit()
     tbl.fetch_metadata()
 
     slc = Slice(
-        slice_name='Energy Sankey',
-        viz_type='sankey',
-        datasource_type='table',
+        slice_name="Energy Sankey",
+        viz_type="sankey",
+        datasource_type="table",
         datasource_id=tbl.id,
-        params=textwrap.dedent("""\
+        params=textwrap.dedent(
+            """\
         {
             "collapsed_fieldsets": "",
             "groupby": [
@@ -58,17 +56,19 @@ def load_energy():
             "viz_type": "sankey",
             "where": ""
         }
-        """),
+        """
+        ),
     )
     misc_dash_slices.add(slc.slice_name)
     merge_slice(slc)
 
     slc = Slice(
-        slice_name='Energy Force Layout',
-        viz_type='directed_force',
-        datasource_type='table',
+        slice_name="Energy Force Layout",
+        viz_type="directed_force",
+        datasource_type="table",
         datasource_id=tbl.id,
-        params=textwrap.dedent("""\
+        params=textwrap.dedent(
+            """\
         {
             "charge": "-500",
             "collapsed_fieldsets": "",
@@ -84,17 +84,19 @@ def load_energy():
             "viz_type": "directed_force",
             "where": ""
         }
-        """),
+        """
+        ),
     )
     misc_dash_slices.add(slc.slice_name)
     merge_slice(slc)
 
     slc = Slice(
-        slice_name='Heatmap',
-        viz_type='heatmap',
-        datasource_type='table',
+        slice_name="Heatmap",
+        viz_type="heatmap",
+        datasource_type="table",
         datasource_id=tbl.id,
-        params=textwrap.dedent("""\
+        params=textwrap.dedent(
+            """\
         {
             "all_columns_x": "source",
             "all_columns_y": "target",
@@ -110,7 +112,8 @@ def load_energy():
             "xscale_interval": "1",
             "yscale_interval": "1"
         }
-        """),
+        """
+        ),
     )
     misc_dash_slices.add(slc.slice_name)
     merge_slice(slc)
diff --git a/superset/data/flights.py b/superset/data/flights.py
index 986d75b..5673a81 100644
--- a/superset/data/flights.py
+++ b/superset/data/flights.py
@@ -11,38 +11,37 @@ from .helpers import DATA_FOLDER, TBL
 
 def load_flights():
     """Loading random time series data from a zip file in the repo"""
-    tbl_name = 'flights'
-    with gzip.open(os.path.join(DATA_FOLDER, 'flight_data.csv.gz')) as f:
-        pdf = pd.read_csv(f, encoding='latin-1')
+    tbl_name = "flights"
+    with gzip.open(os.path.join(DATA_FOLDER, "flight_data.csv.gz")) as f:
+        pdf = pd.read_csv(f, encoding="latin-1")
 
     # Loading airports info to join and get lat/long
-    with gzip.open(os.path.join(DATA_FOLDER, 'airports.csv.gz')) as f:
-        airports = pd.read_csv(f, encoding='latin-1')
-    airports = airports.set_index('IATA_CODE')
+    with gzip.open(os.path.join(DATA_FOLDER, "airports.csv.gz")) as f:
+        airports = pd.read_csv(f, encoding="latin-1")
+    airports = airports.set_index("IATA_CODE")
 
-    pdf['ds'] = pdf.YEAR.map(str) + '-0' + pdf.MONTH.map(str) + '-0' + pdf.DAY.map(str)
+    pdf["ds"] = pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
     pdf.ds = pd.to_datetime(pdf.ds)
-    del pdf['YEAR']
-    del pdf['MONTH']
-    del pdf['DAY']
+    del pdf["YEAR"]
+    del pdf["MONTH"]
+    del pdf["DAY"]
 
-    pdf = pdf.join(airports, on='ORIGIN_AIRPORT', rsuffix='_ORIG')
-    pdf = pdf.join(airports, on='DESTINATION_AIRPORT', rsuffix='_DEST')
+    pdf = pdf.join(airports, on="ORIGIN_AIRPORT", rsuffix="_ORIG")
+    pdf = pdf.join(airports, on="DESTINATION_AIRPORT", rsuffix="_DEST")
     pdf.to_sql(
         tbl_name,
         db.engine,
-        if_exists='replace',
+        if_exists="replace",
         chunksize=500,
-        dtype={
-            'ds': DateTime,
-        },
-        index=False)
+        dtype={"ds": DateTime},
+        index=False,
+    )
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
     if not tbl:
         tbl = TBL(table_name=tbl_name)
-    tbl.description = 'Random set of flights in the US'
+    tbl.description = "Random set of flights in the US"
     tbl.database = utils.get_or_create_main_db()
     db.session.merge(tbl)
     db.session.commit()
     tbl.fetch_metadata()
-    print('Done loading table!')
+    print("Done loading table!")
diff --git a/superset/data/helpers.py b/superset/data/helpers.py
index d1ecccb..412edc3 100644
--- a/superset/data/helpers.py
+++ b/superset/data/helpers.py
@@ -12,24 +12,25 @@ DB = models.Database
 Slice = models.Slice
 Dash = models.Dashboard
 
-TBL = ConnectorRegistry.sources['table']
+TBL = ConnectorRegistry.sources["table"]
 
 config = app.config
 
-DATA_FOLDER = os.path.join(config.get('BASE_DIR'), 'data')
+DATA_FOLDER = os.path.join(config.get("BASE_DIR"), "data")
 
 misc_dash_slices = set()  # slices assembled in a 'Misc Chart' dashboard
 
 
 def update_slice_ids(layout_dict, slices):
     charts = [
-        component for component in layout_dict.values()
-        if isinstance(component, dict) and component['type'] == 'CHART'
+        component
+        for component in layout_dict.values()
+        if isinstance(component, dict) and component["type"] == "CHART"
     ]
-    sorted_charts = sorted(charts, key=lambda k: k['meta']['chartId'])
+    sorted_charts = sorted(charts, key=lambda k: k["meta"]["chartId"])
     for i, chart_component in enumerate(sorted_charts):
         if i < len(slices):
-            chart_component['meta']['chartId'] = int(slices[i].id)
+            chart_component["meta"]["chartId"] = int(slices[i].id)
 
 
 def merge_slice(slc):
diff --git a/superset/data/long_lat.py b/superset/data/long_lat.py
index c82452f..0886ef8 100644
--- a/superset/data/long_lat.py
+++ b/superset/data/long_lat.py
@@ -21,50 +21,49 @@ from .helpers import (
 
 def load_long_lat_data():
     """Loading lat/long data from a csv file in the repo"""
-    with gzip.open(os.path.join(DATA_FOLDER, 'san_francisco.csv.gz')) as f:
-        pdf = pd.read_csv(f, encoding='utf-8')
-    start = datetime.datetime.now().replace(
-        hour=0, minute=0, second=0, microsecond=0)
-    pdf['datetime'] = [
+    with gzip.open(os.path.join(DATA_FOLDER, "san_francisco.csv.gz")) as f:
+        pdf = pd.read_csv(f, encoding="utf-8")
+    start = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
+    pdf["datetime"] = [
         start + datetime.timedelta(hours=i * 24 / (len(pdf) - 1))
         for i in range(len(pdf))
     ]
-    pdf['occupancy'] = [random.randint(1, 6) for _ in range(len(pdf))]
-    pdf['radius_miles'] = [random.uniform(1, 3) for _ in range(len(pdf))]
-    pdf['geohash'] = pdf[['LAT', 'LON']].apply(
-        lambda x: geohash.encode(*x), axis=1)
-    pdf['delimited'] = pdf['LAT'].map(str).str.cat(pdf['LON'].map(str), sep=',')
+    pdf["occupancy"] = [random.randint(1, 6) for _ in range(len(pdf))]
+    pdf["radius_miles"] = [random.uniform(1, 3) for _ in range(len(pdf))]
+    pdf["geohash"] = pdf[["LAT", "LON"]].apply(lambda x: geohash.encode(*x), axis=1)
+    pdf["delimited"] = pdf["LAT"].map(str).str.cat(pdf["LON"].map(str), sep=",")
     pdf.to_sql(  # pylint: disable=no-member
-        'long_lat',
+        "long_lat",
         db.engine,
-        if_exists='replace',
+        if_exists="replace",
         chunksize=500,
         dtype={
-            'longitude': Float(),
-            'latitude': Float(),
-            'number': Float(),
-            'street': String(100),
-            'unit': String(10),
-            'city': String(50),
-            'district': String(50),
-            'region': String(50),
-            'postcode': Float(),
-            'id': String(100),
-            'datetime': DateTime(),
-            'occupancy': Float(),
-            'radius_miles': Float(),
-            'geohash': String(12),
-            'delimited': String(60),
+            "longitude": Float(),
+            "latitude": Float(),
+            "number": Float(),
+            "street": String(100),
... 22033 lines suppressed ...