Posted to commits@superset.apache.org by jo...@apache.org on 2021/08/16 17:21:06 UTC

[superset] branch master updated: chore(pylint): Reenable import-outside-toplevel check (#16263)

This is an automated email from the ASF dual-hosted git repository.

johnbodley pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 0df15bf  chore(pylint): Reenable import-outside-toplevel check (#16263)
0df15bf is described below

commit 0df15bf2079e3958b4fb6bc070ec97bb770a6ad3
Author: John Bodley <45...@users.noreply.github.com>
AuthorDate: Mon Aug 16 10:20:13 2021 -0700

    chore(pylint): Reenable import-outside-toplevel check (#16263)
    
    Co-authored-by: John Bodley <jo...@airbnb.com>
---
 .pylintrc                                    |  1 -
 setup.cfg                                    |  2 +-
 superset/cli.py                              | 32 +++++++++++++++++++++-------
 superset/connectors/sqla/models.py           |  1 +
 superset/dashboards/commands/importers/v0.py |  1 +
 superset/db_engine_specs/bigquery.py         |  1 +
 superset/db_engine_specs/clickhouse.py       |  4 ++--
 superset/db_engine_specs/elasticsearch.py    |  3 ++-
 superset/db_engine_specs/hive.py             |  7 ++++--
 superset/db_engine_specs/mysql.py            |  1 +
 superset/db_engine_specs/presto.py           |  1 +
 superset/db_engines/hive.py                  |  1 +
 superset/initialization/__init__.py          |  9 ++++----
 superset/jinja_context.py                    |  4 ++++
 superset/reports/dao.py                      |  3 +--
 superset/security/manager.py                 | 29 ++++++++++++++-----------
 superset/sql_validators/presto_db.py         |  1 +
 superset/tasks/async_queries.py              |  1 +
 superset/utils/core.py                       | 16 ++++++--------
 superset/utils/decorators.py                 |  1 +
 superset/utils/log.py                        |  2 ++
 superset/utils/mock_data.py                  |  1 +
 superset/utils/pandas_postprocessing.py      |  6 +++---
 superset/utils/profiler.py                   |  2 +-
 superset/utils/webdriver.py                  |  4 +---
 25 files changed, 83 insertions(+), 51 deletions(-)
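
For readers unfamiliar with the check: pylint's import-outside-toplevel warning flags imports placed inside functions or methods instead of at module scope. The hunks below apply one of two fixes: imports with no circular-import or optional-dependency reason are hoisted to the top of the module, while deliberately deferred imports keep their position and gain an inline pragma. A minimal sketch of both patterns (the function bodies here are illustrative, not taken from the commit):

    import os  # hard dependency with no cycle risk: hoisted to module scope


    def resolve_docs_path() -> str:
        # No pragma needed once the import lives at module scope.
        return os.path.join(os.path.dirname(os.path.abspath(__file__)), "docs")


    def load_examples_lazily() -> None:
        # Deferred on purpose (a heavy module, or one that would create an
        # import cycle at load time), so the check is silenced locally
        # rather than project-wide in .pylintrc.
        # pylint: disable=import-outside-toplevel
        import json  # stands in for a heavyweight or cycle-prone module

        print(json.dumps({"examples_loaded": True}))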

diff --git a/.pylintrc b/.pylintrc
index 87275f0..bf97ed2 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -84,7 +84,6 @@ confidence=
 disable=
     missing-docstring,
     too-many-lines,
-    import-outside-toplevel,
     raise-missing-from,
     too-few-public-methods,
     duplicate-code,
diff --git a/setup.cfg b/setup.cfg
index 1e16680..15817a8 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -30,7 +30,7 @@ combine_as_imports = true
 include_trailing_comma = true
 line_length = 88
 known_first_party = superset
-known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,cron_descriptor,croniter,cryptography,dateutil,deprecation,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_jwt_extended,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,freezegun,geohash,geopy,graphlib,holidays,humanize,isodate,jinja2,jwt,markdown,markupsafe,marshmallow,marshmallow_enum,msgpack,numpy,pandas,parameterized,parsedatetime,pgsanity,pkg_resour [...]
+known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,cron_descriptor,croniter,cryptography,dateutil,deprecation,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_jwt_extended,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,freezegun,geohash,geopy,graphlib,holidays,humanize,isodate,jinja2,jwt,markdown,markupsafe,marshmallow,marshmallow_enum,msgpack,numpy,pandas,parameterized,parsedatetime,pgsanity,pkg_resour [...]
 multi_line_output = 3
 order_by_type = false
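
Hoisting previously function-local imports to module scope makes them visible to isort, and this repo pins package classification explicitly, so the known_third_party list (truncated in this archive view) changes alongside the code. A grouping sketch under the isort config above (package choices illustrative):

    # standard library group
    import os

    # third-party group: classified via the known_third_party setting
    from werkzeug.middleware.proxy_fix import ProxyFix

    # first-party group: known_first_party = superset
    from superset.app import create_app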
 
diff --git a/superset/cli.py b/superset/cli.py
index 69051a3..6fa478e 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -17,6 +17,7 @@
 # under the License.
 import json
 import logging
+import os
 import sys
 from datetime import datetime, timedelta
 from pathlib import Path
@@ -26,11 +27,14 @@ from zipfile import is_zipfile, ZipFile
 
 import click
 import yaml
+from apispec import APISpec
+from apispec.ext.marshmallow import MarshmallowPlugin
 from celery.utils.abstract import CallableTask
 from colorama import Fore, Style
 from flask import current_app, g
 from flask.cli import FlaskGroup, with_appcontext
 from flask_appbuilder import Model
+from flask_appbuilder.api import BaseApi
 
 from superset import app, appbuilder, config, security_manager
 from superset.app import create_app
@@ -120,6 +124,7 @@ def load_examples_run(
         examples_db = utils.get_example_database()
         print(f"Loading examples metadata and related data into {examples_db}")
 
+    # pylint: disable=import-outside-toplevel
     from superset import examples
 
     examples.load_css_templates()
@@ -202,6 +207,7 @@ def load_examples(
 )
 def import_directory(directory: str, overwrite: bool, force: bool) -> None:
     """Imports configs from a given directory"""
+    # pylint: disable=import-outside-toplevel
     from superset.examples.utils import load_configs_from_directory
 
     load_configs_from_directory(
@@ -242,9 +248,11 @@ def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
 )
 def refresh_druid(datasource: str, merge: bool) -> None:
     """Refresh druid datasources"""
-    session = db.session()
+    # pylint: disable=import-outside-toplevel
     from superset.connectors.druid.models import DruidCluster
 
+    session = db.session()
+
     for cluster in session.query(DruidCluster).all():
         try:
             cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
@@ -265,6 +273,7 @@ if feature_flags.get("VERSIONED_EXPORT"):
     )
     def export_dashboards(dashboard_file: Optional[str] = None) -> None:
         """Export dashboards to ZIP file"""
+        # pylint: disable=import-outside-toplevel
         from superset.dashboards.commands.export import ExportDashboardsCommand
         from superset.models.dashboard import Dashboard
 
@@ -296,6 +305,7 @@ if feature_flags.get("VERSIONED_EXPORT"):
     )
     def export_datasources(datasource_file: Optional[str] = None) -> None:
         """Export datasources to ZIP file"""
+        # pylint: disable=import-outside-toplevel
         from superset.connectors.sqla.models import SqlaTable
         from superset.datasets.commands.export import ExportDatasetsCommand
 
@@ -330,6 +340,7 @@ if feature_flags.get("VERSIONED_EXPORT"):
     )
     def import_dashboards(path: str, username: Optional[str]) -> None:
         """Import dashboards from ZIP file"""
+        # pylint: disable=import-outside-toplevel
         from superset.commands.importers.v1.utils import get_contents_from_bundle
         from superset.dashboards.commands.importers.dispatcher import (
             ImportDashboardsCommand,
@@ -358,6 +369,7 @@ if feature_flags.get("VERSIONED_EXPORT"):
     )
     def import_datasources(path: str) -> None:
         """Import datasources from ZIP file"""
+        # pylint: disable=import-outside-toplevel
         from superset.commands.importers.v1.utils import get_contents_from_bundle
         from superset.datasets.commands.importers.dispatcher import (
             ImportDatasetsCommand,
@@ -396,6 +408,7 @@ else:
         dashboard_file: Optional[str], print_stdout: bool = False
     ) -> None:
         """Export dashboards to JSON"""
+        # pylint: disable=import-outside-toplevel
         from superset.utils import dashboard_import_export
 
         data = dashboard_import_export.export_dashboards(db.session)
@@ -443,6 +456,7 @@ else:
         include_defaults: bool = False,
     ) -> None:
         """Export datasources to YAML"""
+        # pylint: disable=import-outside-toplevel
         from superset.utils import dict_import_export
 
         data = dict_import_export.export_to_dict(
@@ -481,6 +495,7 @@ else:
     )
     def import_dashboards(path: str, recursive: bool, username: str) -> None:
         """Import dashboards from JSON file"""
+        # pylint: disable=import-outside-toplevel
         from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
 
         path_object = Path(path)
@@ -528,6 +543,7 @@ else:
     )
     def import_datasources(path: str, sync: str, recursive: bool) -> None:
         """Import datasources from YAML"""
+        # pylint: disable=import-outside-toplevel
         from superset.datasets.commands.importers.v0 import ImportDatasetsCommand
 
         sync_array = sync.split(",")
@@ -564,6 +580,7 @@ else:
     )
     def export_datasource_schema(back_references: bool) -> None:
         """Export datasource YAML schema to stdout"""
+        # pylint: disable=import-outside-toplevel
         from superset.utils import dict_import_export
 
         data = dict_import_export.export_schema_to_dict(back_references=back_references)
@@ -574,6 +591,7 @@ else:
 @with_appcontext
 def update_datasources_cache() -> None:
     """Refresh sqllab datasources cache"""
+    # pylint: disable=import-outside-toplevel
     from superset.models.core import Database
 
     for database in db.session.query(Database).all():
@@ -678,6 +696,7 @@ def compute_thumbnails(
     model_id: int,
 ) -> None:
     """Compute thumbnails"""
+    # pylint: disable=import-outside-toplevel
     from superset.models.dashboard import Dashboard
     from superset.models.slice import Slice
     from superset.tasks.thumbnails import (
@@ -790,6 +809,7 @@ def sync_tags() -> None:
     # pylint: disable=no-member
     metadata = Model.metadata
 
+    # pylint: disable=import-outside-toplevel
     from superset.common.tags import add_favorites, add_owners, add_types
 
     add_types(db.engine, metadata)
@@ -802,6 +822,7 @@ def sync_tags() -> None:
 def alert() -> None:
     """Run the alert scheduler loop"""
     # this command is just for testing purposes
+    # pylint: disable=import-outside-toplevel
     from superset.models.schedules import ScheduleType
     from superset.tasks.schedules import schedule_window
 
@@ -820,13 +841,8 @@ def alert() -> None:
 @with_appcontext
 def update_api_docs() -> None:
     """Regenerate the openapi.json file in docs"""
-    from apispec import APISpec
-    from apispec.ext.marshmallow import MarshmallowPlugin
-    from flask_appbuilder.api import BaseApi
-    from os import path
-
-    superset_dir = path.abspath(path.dirname(__file__))
-    openapi_json = path.join(
+    superset_dir = os.path.abspath(os.path.dirname(__file__))
+    openapi_json = os.path.join(
         superset_dir, "..", "docs", "src", "resources", "openapi.json"
     )
     api_version = "v1"
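
One convention worth noting in the cli.py hunks above: the pragma is written as a standalone comment line rather than as a trailing comment on the import itself. Pylint applies a standalone disable comment from that line to the end of the enclosing block, so a single pragma covers every deferred import in the function. A self-contained illustration (export_report is a hypothetical command, not part of this commit):

    def export_report(path: str) -> None:
        # pylint: disable=import-outside-toplevel
        # The standalone disable above applies from here to the end of the
        # function body, covering both deferred imports below.
        import json
        from zipfile import ZipFile

        with ZipFile(path) as bundle:
            manifest = json.loads(bundle.read("manifest.json"))
            print(manifest)
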
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index c9b3085..c28b44e 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -1648,6 +1648,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-at
         :raises Exception: If the target table is not unique
         """
 
+        # pylint: disable=import-outside-toplevel
         from superset.datasets.commands.exceptions import get_dataset_exist_error_msg
         from superset.datasets.dao import DatasetDAO
 
diff --git a/superset/dashboards/commands/importers/v0.py b/superset/dashboards/commands/importers/v0.py
index 1d12aba..a37ccc1 100644
--- a/superset/dashboards/commands/importers/v0.py
+++ b/superset/dashboards/commands/importers/v0.py
@@ -275,6 +275,7 @@ def decode_dashboards(  # pylint: disable=too-many-return-statements
     Function to be passed into json.loads obj_hook parameter
     Recreates the dashboard object from a json representation.
     """
+    # pylint: disable=import-outside-toplevel
     from superset.connectors.druid.models import (
         DruidCluster,
         DruidColumn,
diff --git a/superset/db_engine_specs/bigquery.py b/superset/db_engine_specs/bigquery.py
index 4438d21..99dfdb7 100644
--- a/superset/db_engine_specs/bigquery.py
+++ b/superset/db_engine_specs/bigquery.py
@@ -324,6 +324,7 @@ class BigQueryEngineSpec(BaseEngineSpec):
         """
 
         try:
+            # pylint: disable=import-outside-toplevel
             import pandas_gbq
             from google.oauth2 import service_account
         except ImportError:
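
Optional dependencies are handled differently from hard ones: pandas_gbq and google.oauth2 may legitimately be absent, so the import stays inside the try/except and only gains the pragma. A minimal sketch of that idiom (function name and error message are illustrative):

    def upload_to_bigquery(df, table: str) -> None:
        try:
            # Optional extra: keep the import deferred and silence the
            # check locally instead of hoisting an import that may fail.
            # pylint: disable=import-outside-toplevel
            import pandas_gbq
        except ImportError as ex:
            raise RuntimeError(
                "pandas-gbq is required for BigQuery uploads; install the "
                "optional BigQuery dependencies."
            ) from ex

        pandas_gbq.to_gbq(df, destination_table=table)
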
diff --git a/superset/db_engine_specs/clickhouse.py b/superset/db_engine_specs/clickhouse.py
index 4db5684..a425a71 100644
--- a/superset/db_engine_specs/clickhouse.py
+++ b/superset/db_engine_specs/clickhouse.py
@@ -17,6 +17,8 @@
 from datetime import datetime
 from typing import Dict, Optional, Type
 
+from urllib3.exceptions import NewConnectionError
+
 from superset.db_engine_specs.base import BaseEngineSpec
 from superset.db_engine_specs.exceptions import SupersetDBAPIDatabaseError
 from superset.utils import core as utils
@@ -48,8 +50,6 @@ class ClickHouseEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
 
     @classmethod
     def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
-        from urllib3.exceptions import NewConnectionError
-
         return {NewConnectionError: SupersetDBAPIDatabaseError}
 
     @classmethod
diff --git a/superset/db_engine_specs/elasticsearch.py b/superset/db_engine_specs/elasticsearch.py
index cf96bf1..65042ed 100644
--- a/superset/db_engine_specs/elasticsearch.py
+++ b/superset/db_engine_specs/elasticsearch.py
@@ -49,7 +49,8 @@ class ElasticSearchEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-metho
 
     @classmethod
     def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
-        import es.exceptions as es_exceptions  # pylint: disable=import-error
+        # pylint: disable=import-error,import-outside-toplevel
+        import es.exceptions as es_exceptions
 
         return {
             es_exceptions.DatabaseError: SupersetDBAPIDatabaseError,
diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index 3915252..e33012e 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -62,8 +62,8 @@ def upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str:
     :returns: The S3 location of the table
     """
 
-    # Optional dependency
-    import boto3  # pylint: disable=import-error
+    # pylint: disable=import-outside-toplevel
+    import boto3
 
     bucket_path = current_app.config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"]
 
@@ -128,6 +128,7 @@ class HiveEngineSpec(PrestoEngineSpec):
 
     @classmethod
     def patch(cls) -> None:
+        # pylint: disable=import-outside-toplevel
         from pyhive import hive
         from TCLIService import (
             constants as patched_constants,
@@ -152,6 +153,7 @@ class HiveEngineSpec(PrestoEngineSpec):
     def fetch_data(
         cls, cursor: Any, limit: Optional[int] = None
     ) -> List[Tuple[Any, ...]]:
+        # pylint: disable=import-outside-toplevel
         import pyhive
         from TCLIService import ttypes
 
@@ -314,6 +316,7 @@ class HiveEngineSpec(PrestoEngineSpec):
         cls, cursor: Any, query: Query, session: Session
     ) -> None:
         """Updates progress information"""
+        # pylint: disable=import-outside-toplevel
         from pyhive import hive
 
         unfinished_states = (
diff --git a/superset/db_engine_specs/mysql.py b/superset/db_engine_specs/mysql.py
index 4f4485a..2fb1e97 100644
--- a/superset/db_engine_specs/mysql.py
+++ b/superset/db_engine_specs/mysql.py
@@ -171,6 +171,7 @@ class MySQLEngineSpec(BaseEngineSpec, BasicParametersMixin):
     def get_datatype(cls, type_code: Any) -> Optional[str]:
         if not cls.type_code_map:
             # only import and store if needed at least once
+            # pylint: disable=import-outside-toplevel
             import MySQLdb
 
             ft = MySQLdb.constants.FIELD_TYPE
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index d3567e9..eb0d2df 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -925,6 +925,7 @@ class PrestoEngineSpec(BaseEngineSpec):  # pylint: disable=too-many-public-metho
         :param schema: Schema name
         :param table: Table (view) name
         """
+        # pylint: disable=import-outside-toplevel
         from pyhive.exc import DatabaseError
 
         engine = cls.get_engine(database, schema)
diff --git a/superset/db_engines/hive.py b/superset/db_engines/hive.py
index 467a8df..b5f5cad 100644
--- a/superset/db_engines/hive.py
+++ b/superset/db_engines/hive.py
@@ -35,6 +35,7 @@ def fetch_logs(
     .. note::
         This is not a part of DB-API.
     """
+    # pylint: disable=import-outside-toplevel
     from pyhive import hive
     from TCLIService import ttypes
     from thrift import Thrift
diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py
index 0193be4..f2c28a3 100644
--- a/superset/initialization/__init__.py
+++ b/superset/initialization/__init__.py
@@ -26,6 +26,7 @@ from flask import Flask, redirect
 from flask_appbuilder import expose, IndexView
 from flask_babel import gettext as __, lazy_gettext as _
 from flask_compress import Compress
+from werkzeug.middleware.proxy_fix import ProxyFix
 
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.extensions import (
@@ -111,9 +112,7 @@ class SupersetAppInitializer:
         # models which in turn try to import
         # the global Flask app
         #
-        # pylint: disable=too-many-locals
-        # pylint: disable=too-many-statements
-        # pylint: disable=too-many-branches
+        # pylint: disable=import-outside-toplevel,too-many-branches,too-many-locals,too-many-statements
         from superset.annotation_layers.api import AnnotationLayerRestApi
         from superset.annotation_layers.annotations.api import AnnotationRestApi
         from superset.async_events.api import AsyncEventsRestApi
@@ -623,6 +622,7 @@ class SupersetAppInitializer:
         # Doing local imports here as model importing causes a reference to
         # app.config to be invoked and we need the current_app to have been setup
         #
+        # pylint: disable=import-outside-toplevel
         from superset.utils.url_map_converters import (
             ObjectTypeConverter,
             RegexConverter,
@@ -633,13 +633,12 @@ class SupersetAppInitializer:
 
     def configure_middlewares(self) -> None:
         if self.config["ENABLE_CORS"]:
+            # pylint: disable=import-outside-toplevel
             from flask_cors import CORS
 
             CORS(self.superset_app, **self.config["CORS_OPTIONS"])
 
         if self.config["ENABLE_PROXY_FIX"]:
-            from werkzeug.middleware.proxy_fix import ProxyFix
-
             self.superset_app.wsgi_app = ProxyFix(  # type: ignore
                 self.superset_app.wsgi_app, **self.config["PROXY_FIX_CONFIG"]
             )
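
This file shows the dividing line between the two treatments: werkzeug ships with Flask, so ProxyFix is safe to import at module scope, while flask_cors is only installed when CORS support is wanted and therefore stays behind the ENABLE_CORS guard with a pragma. A condensed sketch (config keys as in the hunk above):

    from werkzeug.middleware.proxy_fix import ProxyFix  # hard dependency


    def configure_middlewares(app, config) -> None:
        if config["ENABLE_CORS"]:
            # Optional dependency, imported only when the feature is on.
            # pylint: disable=import-outside-toplevel
            from flask_cors import CORS

            CORS(app, **config["CORS_OPTIONS"])

        if config["ENABLE_PROXY_FIX"]:
            app.wsgi_app = ProxyFix(app.wsgi_app, **config["PROXY_FIX_CONFIG"])
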
diff --git a/superset/jinja_context.py b/superset/jinja_context.py
index 84fa9b6..ff56fc9 100644
--- a/superset/jinja_context.py
+++ b/superset/jinja_context.py
@@ -169,6 +169,7 @@ class ExtraCache:
         :returns: The URL parameters
         """
 
+        # pylint: disable=import-outside-toplevel
         from superset.views.utils import get_form_data
 
         if request.args.get(param):
@@ -284,6 +285,7 @@ class ExtraCache:
             only apply to the inner query
         :return: returns a list of filters
         """
+        # pylint: disable=import-outside-toplevel
         from superset.utils.core import FilterOperator
         from superset.views.utils import get_form_data
 
@@ -496,6 +498,7 @@ class PrestoTemplateProcessor(JinjaTemplateProcessor):
         :return: the latest partition array
         """
 
+        # pylint: disable=import-outside-toplevel
         from superset.db_engine_specs.presto import PrestoEngineSpec
 
         table_name, schema = self._schema_table(table_name, self._schema)
@@ -506,6 +509,7 @@ class PrestoTemplateProcessor(JinjaTemplateProcessor):
     def latest_sub_partition(self, table_name: str, **kwargs: Any) -> Any:
         table_name, schema = self._schema_table(table_name, self._schema)
 
+        # pylint: disable=import-outside-toplevel
         from superset.db_engine_specs.presto import PrestoEngineSpec
 
         return cast(
diff --git a/superset/reports/dao.py b/superset/reports/dao.py
index 0f729c3..697b08c 100644
--- a/superset/reports/dao.py
+++ b/superset/reports/dao.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+import json
 import logging
 from datetime import datetime
 from typing import Any, Dict, List, Optional
@@ -138,7 +139,6 @@ class ReportScheduleDAO(BaseDAO):
         create a report schedule and nested recipients
         :raises: DAOCreateFailedError
         """
-        import json
 
         try:
             model = ReportSchedule()
@@ -171,7 +171,6 @@ class ReportScheduleDAO(BaseDAO):
         create a report schedule and nested recipients
         :raises: DAOCreateFailedError
         """
-        import json
 
         try:
             for key, value in properties.items():
diff --git a/superset/security/manager.py b/superset/security/manager.py
index d243e5a..6c99821 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -364,9 +364,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :returns: The access URL
         """
 
-        from superset import conf
-
-        return conf.get("PERMISSION_INSTRUCTIONS_LINK")
+        return current_app.config.get("PERMISSION_INSTRUCTIONS_LINK")
 
     def get_datasource_access_error_object(  # pylint: disable=invalid-name
         self, datasource: "BaseDatasource"
@@ -428,9 +426,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :returns: The access URL
         """
 
-        from superset import conf
-
-        return conf.get("PERMISSION_INSTRUCTIONS_LINK")
+        return current_app.config.get("PERMISSION_INSTRUCTIONS_LINK")
 
     def get_user_datasources(self) -> List["BaseDatasource"]:
         """
@@ -528,6 +524,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :returns: The list of accessible SQL schemas
         """
 
+        # pylint: disable=import-outside-toplevel
         from superset.connectors.sqla.models import SqlaTable
 
         if hierarchical and self.can_access_database(database):
@@ -629,6 +626,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         Creates missing FAB permissions for datasources, schemas and metrics.
         """
 
+        # pylint: disable=import-outside-toplevel
         from superset.models import core as models
 
         logger.info("Fetching a set of all perms to lookup which ones are missing")
@@ -678,8 +676,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         Initialize the Superset application with security roles and such.
         """
 
-        from superset import conf
-
         logger.info("Syncing role definition")
 
         self.create_custom_permissions()
@@ -692,9 +688,13 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         self.set_role("sql_lab", self._is_sql_lab_pvm)
 
         # Configure public role
-        if conf["PUBLIC_ROLE_LIKE"]:
-            self.copy_role(conf["PUBLIC_ROLE_LIKE"], self.auth_role_public, merge=True)
-        if conf.get("PUBLIC_ROLE_LIKE_GAMMA", False):
+        if current_app.config["PUBLIC_ROLE_LIKE"]:
+            self.copy_role(
+                current_app.config["PUBLIC_ROLE_LIKE"],
+                self.auth_role_public,
+                merge=True,
+            )
+        if current_app.config.get("PUBLIC_ROLE_LIKE_GAMMA", False):
             logger.warning(
                 "The config `PUBLIC_ROLE_LIKE_GAMMA` is deprecated and will be removed "
                 "in Superset 1.0. Please use `PUBLIC_ROLE_LIKE` instead."
@@ -996,7 +996,9 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :raises SupersetSecurityException: If the user cannot access the resource
         """
 
+        # pylint: disable=import-outside-toplevel
         from superset.connectors.sqla.models import SqlaTable
+        from superset.extensions import feature_flag_manager
         from superset.sql_parse import Table
 
         if database and table or query:
@@ -1046,8 +1048,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
 
             assert datasource
 
-            from superset.extensions import feature_flag_manager
-
             if not (
                 self.can_access_schema(datasource)
                 or self.can_access("datasource_access", datasource.perm or "")
@@ -1087,6 +1087,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :returns: A list of filters
         """
         if hasattr(g, "user") and hasattr(g.user, "id"):
+            # pylint: disable=import-outside-toplevel
             from superset.connectors.sqla.models import (
                 RLSFilterRoles,
                 RLSFilterTables,
@@ -1168,6 +1169,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :param dashboard: Dashboard the user wants access to
         :raises DashboardAccessDeniedError: If the user cannot access the resource
         """
+        # pylint: disable=import-outside-toplevel
         from superset.dashboards.commands.exceptions import DashboardAccessDeniedError
         from superset.views.base import get_user_roles, is_user_admin
         from superset.views.utils import is_owner
@@ -1189,6 +1191,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
 
     @staticmethod
     def can_access_based_on_dashboard(datasource: "BaseDatasource") -> bool:
+        # pylint: disable=import-outside-toplevel
         from superset import db
         from superset.dashboards.filters import DashboardAccessFilter
         from superset.models.slice import Slice
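
The `from superset import conf` removals in this file take a third route: instead of lazily importing superset's module-level conf object (a deferred import that existed mainly to dodge a circular import at load time), the code reads settings through Flask's current_app proxy, which requires no import of the superset package at all. A minimal sketch, assuming an active application context:

    from typing import Optional

    from flask import current_app


    def permission_instructions_link() -> Optional[str]:
        # current_app is a context-local proxy, valid only inside an app
        # context; using it removes the need for the deferred conf import.
        return current_app.config.get("PERMISSION_INSTRUCTIONS_LINK")
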
diff --git a/superset/sql_validators/presto_db.py b/superset/sql_validators/presto_db.py
index 7f468c9..bf77f47 100644
--- a/superset/sql_validators/presto_db.py
+++ b/superset/sql_validators/presto_db.py
@@ -65,6 +65,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
         # engine spec's handle_cursor implementation since we don't record
         # these EXPLAIN queries done in validation as proper Query objects
         # in the superset ORM.
+        # pylint: disable=import-outside-toplevel
         from pyhive.exc import DatabaseError
 
         try:
diff --git a/superset/tasks/async_queries.py b/superset/tasks/async_queries.py
index 8e7d2ea..d2df6dd 100644
--- a/superset/tasks/async_queries.py
+++ b/superset/tasks/async_queries.py
@@ -50,6 +50,7 @@ def ensure_user_is_set(user_id: Optional[int]) -> None:
 def load_chart_data_into_cache(
     job_metadata: Dict[str, Any], form_data: Dict[str, Any],
 ) -> None:
+    # pylint: disable=import-outside-toplevel
     from superset.charts.commands.data import ChartDataCommand
 
     try:
diff --git a/superset/utils/core.py b/superset/utils/core.py
index eadf14d..8e51afb 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -1123,9 +1123,7 @@ def merge_extra_form_data(form_data: Dict[str, Any]) -> None:
     )
     if append_filters:
         adhoc_filters.extend(
-            simple_filter_to_adhoc(
-                {"isExtra": True, **fltr}  # type: ignore
-            )
+            simple_filter_to_adhoc({"isExtra": True, **fltr})  # type: ignore
             for fltr in append_filters
             if fltr
         )
@@ -1239,6 +1237,7 @@ def user_label(user: User) -> Optional[str]:
 def get_or_create_db(
     database_name: str, sqlalchemy_uri: str, always_create: Optional[bool] = True
 ) -> "Database":
+    # pylint: disable=import-outside-toplevel
     from superset import db
     from superset.models import core as models
 
@@ -1266,16 +1265,15 @@ def get_or_create_db(
 
 
 def get_example_database() -> "Database":
-    from superset import conf
-
-    db_uri = conf.get("SQLALCHEMY_EXAMPLES_URI") or conf.get("SQLALCHEMY_DATABASE_URI")
+    db_uri = (
+        current_app.config.get("SQLALCHEMY_EXAMPLES_URI")
+        or current_app.config["SQLALCHEMY_DATABASE_URI"]
+    )
     return get_or_create_db("examples", db_uri)
 
 
 def get_main_database() -> "Database":
-    from superset import conf
-
-    db_uri = conf.get("SQLALCHEMY_DATABASE_URI")
+    db_uri = current_app.config["SQLALCHEMY_DATABASE_URI"]
     return get_or_create_db("main", db_uri)
 
 
diff --git a/superset/utils/decorators.py b/superset/utils/decorators.py
index 3728e3a..ab4ee30 100644
--- a/superset/utils/decorators.py
+++ b/superset/utils/decorators.py
@@ -91,6 +91,7 @@ def check_dashboard_access(
     def decorator(f: Callable[..., Any]) -> Callable[..., Any]:
         @wraps(f)
         def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
+            # pylint: disable=import-outside-toplevel
             from superset.models.dashboard import Dashboard
 
             dashboard = Dashboard.get(str(kwargs["dashboard_id_or_slug"]))
diff --git a/superset/utils/log.py b/superset/utils/log.py
index c0f33f2..f458dcc 100644
--- a/superset/utils/log.py
+++ b/superset/utils/log.py
@@ -125,6 +125,7 @@ class AbstractEventLogger(ABC):
         log_to_statsd: bool = True,
         **payload_override: Optional[Dict[str, Any]],
     ) -> None:
+        # pylint: disable=import-outside-toplevel
         from superset.views.core import get_form_data
 
         referrer = request.referrer[:1000] if request and request.referrer else None
@@ -321,6 +322,7 @@ class DBEventLogger(AbstractEventLogger):
         *args: Any,
         **kwargs: Any,
     ) -> None:
+        # pylint: disable=import-outside-toplevel
         from superset.models.core import Log
 
         records = kwargs.get("records", [])
diff --git a/superset/utils/mock_data.py b/superset/utils/mock_data.py
index 87278d2..6de9eca 100644
--- a/superset/utils/mock_data.py
+++ b/superset/utils/mock_data.py
@@ -179,6 +179,7 @@ def add_data(
     :param str table_name: name of table, will be created if it doesn't exist
     :param bool append: if the table already exists, append data or replace?
     """
+    # pylint: disable=import-outside-toplevel
     from superset.utils.core import get_example_database
 
     database = get_example_database()
diff --git a/superset/utils/pandas_postprocessing.py b/superset/utils/pandas_postprocessing.py
index da9163a..1c27cf9 100644
--- a/superset/utils/pandas_postprocessing.py
+++ b/superset/utils/pandas_postprocessing.py
@@ -714,11 +714,11 @@ def _prophet_fit_and_predict(  # pylint: disable=too-many-arguments
     Fit a prophet model and return a DataFrame with predicted results.
     """
     try:
-        prophet_logger = logging.getLogger("prophet.plot")
+        # pylint: disable=import-error,import-outside-toplevel
+        from prophet import Prophet
 
+        prophet_logger = logging.getLogger("prophet.plot")
         prophet_logger.setLevel(logging.CRITICAL)
-        from prophet import Prophet  # pylint: disable=import-error
-
         prophet_logger.setLevel(logging.NOTSET)
     except ModuleNotFoundError:
         raise QueryObjectValidationError(_("`prophet` package not installed"))
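
As in bigquery.py, prophet is an optional extra, so its import stays function-local and the try/except turns a missing package into a user-facing validation error; the surrounding logger calls exist to mute prophet.plot's chatty logging. A hedged sketch of the overall pattern (plain ValueError standing in for QueryObjectValidationError):

    import logging


    def fit_forecast(df):
        try:
            # Optional extra: deferred import with a local pragma.
            # pylint: disable=import-error,import-outside-toplevel
            from prophet import Prophet
        except ModuleNotFoundError as ex:
            raise ValueError("`prophet` package not installed") from ex

        prophet_logger = logging.getLogger("prophet.plot")
        prophet_logger.setLevel(logging.CRITICAL)  # mute while fitting
        try:
            return Prophet().fit(df)
        finally:
            prophet_logger.setLevel(logging.NOTSET)  # restore afterwards
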
diff --git a/superset/utils/profiler.py b/superset/utils/profiler.py
index e7df50e..087ec60 100644
--- a/superset/utils/profiler.py
+++ b/superset/utils/profiler.py
@@ -21,7 +21,7 @@ from unittest import mock
 from werkzeug.wrappers import Request, Response
 
 try:
-    # pylint: disable=import-error
+    # pylint: disable=import-error,import-outside-toplevel
     from pyinstrument import Profiler
 except ModuleNotFoundError:
     Profiler = None
diff --git a/superset/utils/webdriver.py b/superset/utils/webdriver.py
index dbe94e4..c81f8c2 100644
--- a/superset/utils/webdriver.py
+++ b/superset/utils/webdriver.py
@@ -21,6 +21,7 @@ from time import sleep
 from typing import Any, Dict, Optional, Tuple, TYPE_CHECKING
 
 from flask import current_app
+from requests.models import PreparedRequest
 from selenium.common.exceptions import (
     StaleElementReferenceException,
     TimeoutException,
@@ -103,9 +104,6 @@ class WebDriverProxy:
     def get_screenshot(
         self, url: str, element_name: str, user: "User",
     ) -> Optional[bytes]:
-
-        from requests.models import PreparedRequest
-
         params = {"standalone": DashboardStandaloneMode.REPORT.value}
         req = PreparedRequest()
         req.prepare_url(url, params)