Posted to commits@superset.apache.org by el...@apache.org on 2023/12/02 01:20:55 UTC

(superset) branch 2.1 updated (840b486906 -> 52cdd57dd9)

This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a change to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git


    from 840b486906 update changelog
     new cb6de0a9c9 fix: remove `update_charts_owners` (#25843)
     new 34101594e2 fix: DB-specific quoting in Jinja macro (#25779)
     new 7c23cb0b3f fix: improve upload ZIP file validation (#25658)
     new 3e79273614 chore: rate limit requests (#24324)
     new 056c5985d6 chore: Bump sqlparse to 0.4.4 (#24045)
     new b76aa57090 chore: Update mypy and fix stubs issue (#24033)
     new 1111d00e04 chore: bump werkzeug and Flask (#23965)
     new 52cdd57dd9 chore: Use nh3 lib instead of bleach (#23862)

The 8 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .pre-commit-config.yaml                            |  17 +-
 requirements/base.txt                              |  25 +--
 requirements/docker.txt                            |   6 +-
 requirements/integration.txt                       |   2 +-
 setup.cfg                                          |   2 +-
 setup.py                                           |   7 +-
 .../dashboard/components/gridComponents/Chart.jsx  |   4 +-
 superset/charts/api.py                             |   2 +-
 superset/commands/importers/v1/utils.py            |   2 +
 superset/config.py                                 |  19 +++
 superset/dashboards/api.py                         |   4 +-
 superset/dashboards/commands/create.py             |   3 +-
 superset/dashboards/commands/update.py             |   1 -
 superset/dashboards/dao.py                         |   9 --
 superset/databases/api.py                          |   8 +-
 superset/databases/schemas.py                      |   5 +-
 superset/datasets/api.py                           |   2 +-
 superset/db_engine_specs/presto.py                 |   6 +-
 superset/embedded/view.py                          |   2 +
 superset/exceptions.py                             |   2 +-
 superset/extensions/__init__.py                    |   2 +-
 superset/importexport/api.py                       |   2 +-
 superset/initialization/__init__.py                |  10 +-
 superset/jinja_context.py                          |  47 ++++--
 superset/models/dashboard.py                       |   7 +-
 superset/queries/saved_queries/api.py              |   2 +-
 superset/reports/notifications/email.py            |  27 ++--
 superset/result_set.py                             |   6 +-
 superset/sql_parse.py                              |  16 +-
 superset/sqllab/api.py                             |   4 +-
 superset/sqllab/exceptions.py                      |  14 +-
 superset/sqllab/query_render.py                    |   7 +-
 superset/sqllab/sql_json_executer.py               |   3 +-
 superset/tasks/async_queries.py                    |   8 +-
 superset/utils/async_query_manager.py              |   4 +-
 superset/utils/core.py                             |  54 +++++--
 superset/utils/dashboard_import_export.py          |   4 +-
 superset/utils/decorators.py                       |   2 +-
 superset/utils/encrypt.py                          |   2 +-
 superset/utils/machine_auth.py                     |   2 +-
 superset/utils/pandas_postprocessing/boxplot.py    |   4 +-
 superset/utils/pandas_postprocessing/flatten.py    |   2 +-
 superset/utils/url_map_converters.py               |   2 +-
 superset/views/base.py                             |   8 +-
 superset/views/base_api.py                         |   5 +-
 superset/views/core.py                             |   6 +-
 superset/views/dashboard/views.py                  |   2 +-
 superset/views/utils.py                            |   4 +-
 tests/integration_tests/async_events/api_tests.py  |   4 +
 tests/integration_tests/charts/data/api_tests.py   |  18 ++-
 tests/integration_tests/core_tests.py              |   2 +
 tests/integration_tests/dashboards/api_tests.py    |  49 ------
 .../dashboards/permalink/api_tests.py              |   6 +-
 tests/integration_tests/datasource_tests.py        |   2 +-
 .../db_engine_specs/base_engine_spec_tests.py      |  82 +++++-----
 .../explore/permalink/api_tests.py                 |   8 +-
 tests/integration_tests/reports/commands_tests.py  |  12 +-
 tests/integration_tests/reports/scheduler_tests.py |   2 +-
 .../security/analytics_db_safety_tests.py          |  12 +-
 tests/integration_tests/superset_test_config.py    |   2 +
 .../integration_tests/tasks/async_queries_tests.py |   6 +
 tests/integration_tests/utils/core_tests.py        |   4 +-
 tests/unit_tests/jinja_context_test.py             |   9 +-
 tests/unit_tests/notifications/email_tests.py      |   5 +-
 tests/unit_tests/tasks/test_cron_util.py           |   2 +-
 tests/unit_tests/utils/test_core.py                | 179 ++++++++++++++++++++-
 66 files changed, 505 insertions(+), 283 deletions(-)


(superset) 04/08: chore: rate limit requests (#24324)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 3e792736144bd089c6b05f1afb42d87bce0c267a
Author: Beto Dealmeida <ro...@dealmeida.net>
AuthorDate: Fri Aug 11 09:35:31 2023 -0700

    chore: rate limit requests (#24324)
---
 superset/config.py                              | 14 ++++++++++++++
 superset/dashboards/api.py                      |  2 +-
 superset/models/dashboard.py                    |  3 ++-
 superset/utils/dashboard_import_export.py       |  4 ++--
 superset/views/dashboard/views.py               |  2 +-
 tests/integration_tests/superset_test_config.py |  2 ++
 6 files changed, 22 insertions(+), 5 deletions(-)

diff --git a/superset/config.py b/superset/config.py
index f2d9fa5adf..39ce66e875 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -278,6 +278,20 @@ PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1, "x_host": 1, "x_port": 1, "x_prefi
 # Configuration for scheduling queries from SQL Lab.
 SCHEDULED_QUERIES: Dict[str, Any] = {}
 
+# FAB Rate limiting: this is a security feature for preventing DDOS attacks. The
+# feature is on by default to make Superset secure by default, but you should
+# fine tune the limits to your needs. You can read more about the different
+# parameters here: https://flask-limiter.readthedocs.io/en/stable/configuration.html
+RATELIMIT_ENABLED = True
+RATELIMIT_APPLICATION = "50 per second"
+AUTH_RATE_LIMITED = True
+AUTH_RATE_LIMIT = "5 per second"
+# A storage location conforming to the scheme in storage-scheme. See the limits
+# library for allowed values: https://limits.readthedocs.io/en/stable/storage.html
+# RATELIMIT_STORAGE_URI = "redis://host:port"
+# A callable that returns the unique identity of the current request.
+# RATELIMIT_REQUEST_IDENTIFIER = flask.Request.endpoint
+
 # ------------------------------
 # GLOBALS FOR APP Builder
 # ------------------------------
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 1a476a0a97..6f478e22cc 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -813,7 +813,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
             Dashboard.id.in_(requested_ids)
         )
         query = self._base_filters.apply_all(query)
-        ids = [item.id for item in query.all()]
+        ids = {item.id for item in query.all()}
         if not ids:
             return self.response_404()
         export = Dashboard.export_dashboards(ids)
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index 60a8ea0e30..e2192ae2f2 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -372,7 +372,8 @@ class Dashboard(Model, AuditMixinNullable, ImportExportMixin):
 
     @classmethod
     def export_dashboards(  # pylint: disable=too-many-locals
-        cls, dashboard_ids: List[int]
+        cls,
+        dashboard_ids: Set[int],
     ) -> str:
         copied_dashboards = []
         datasource_ids = set()
diff --git a/superset/utils/dashboard_import_export.py b/superset/utils/dashboard_import_export.py
index fc61d0a422..eef8cbe6df 100644
--- a/superset/utils/dashboard_import_export.py
+++ b/superset/utils/dashboard_import_export.py
@@ -27,8 +27,8 @@ def export_dashboards(session: Session) -> str:
     """Returns all dashboards metadata as a json dump"""
     logger.info("Starting export")
     dashboards = session.query(Dashboard)
-    dashboard_ids = []
+    dashboard_ids = set()
     for dashboard in dashboards:
-        dashboard_ids.append(dashboard.id)
+        dashboard_ids.add(dashboard.id)
     data = Dashboard.export_dashboards(dashboard_ids)
     return data
diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py
index 52cb2da82e..e476a88f31 100644
--- a/superset/views/dashboard/views.py
+++ b/superset/views/dashboard/views.py
@@ -76,7 +76,7 @@ class DashboardModelView(
     @expose("/export_dashboards_form")
     def download_dashboards(self) -> FlaskResponse:
         if request.args.get("action") == "go":
-            ids = request.args.getlist("id")
+            ids = set(request.args.getlist("id"))
             return Response(
                 DashboardModel.export_dashboards(ids),
                 headers=generate_download_headers("json"),
diff --git a/tests/integration_tests/superset_test_config.py b/tests/integration_tests/superset_test_config.py
index 19c2cc000f..76b83fb465 100644
--- a/tests/integration_tests/superset_test_config.py
+++ b/tests/integration_tests/superset_test_config.py
@@ -96,6 +96,8 @@ REDIS_CELERY_DB = os.environ.get("REDIS_CELERY_DB", 2)
 REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3)
 REDIS_CACHE_DB = os.environ.get("REDIS_CACHE_DB", 4)
 
+RATELIMIT_ENABLED = False
+
 
 CACHE_CONFIG = {
     "CACHE_TYPE": "RedisCache",


(superset) 01/08: fix: remove `update_charts_owners` (#25843)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit cb6de0a9c9f505ee3f26e79ca9bfa5f3901528a0
Author: Beto Dealmeida <ro...@dealmeida.net>
AuthorDate: Fri Nov 3 13:06:31 2023 -0400

    fix: remove `update_charts_owners` (#25843)
---
 superset/dashboards/commands/create.py          |  3 +-
 superset/dashboards/commands/update.py          |  1 -
 superset/dashboards/dao.py                      |  9 -----
 tests/integration_tests/dashboards/api_tests.py | 49 -------------------------
 4 files changed, 1 insertion(+), 61 deletions(-)

diff --git a/superset/dashboards/commands/create.py b/superset/dashboards/commands/create.py
index 811508c2e7..68fd2567eb 100644
--- a/superset/dashboards/commands/create.py
+++ b/superset/dashboards/commands/create.py
@@ -40,8 +40,7 @@ class CreateDashboardCommand(CreateMixin, BaseCommand):
     def run(self) -> Model:
         self.validate()
         try:
-            dashboard = DashboardDAO.create(self._properties, commit=False)
-            dashboard = DashboardDAO.update_charts_owners(dashboard, commit=True)
+            dashboard = DashboardDAO.create(self._properties, commit=True)
         except DAOCreateFailedError as ex:
             logger.exception(ex.exception)
             raise DashboardCreateFailedError() from ex
diff --git a/superset/dashboards/commands/update.py b/superset/dashboards/commands/update.py
index 12ac241dcc..f10f12564f 100644
--- a/superset/dashboards/commands/update.py
+++ b/superset/dashboards/commands/update.py
@@ -56,7 +56,6 @@ class UpdateDashboardCommand(UpdateMixin, BaseCommand):
                     data=json.loads(self._properties.get("json_metadata", "{}")),
                     commit=False,
                 )
-            dashboard = DashboardDAO.update_charts_owners(dashboard, commit=False)
             db.session.commit()
         except DAOUpdateFailedError as ex:
             logger.exception(ex.exception)
diff --git a/superset/dashboards/dao.py b/superset/dashboards/dao.py
index 3f0666266f..68e2678247 100644
--- a/superset/dashboards/dao.py
+++ b/superset/dashboards/dao.py
@@ -154,15 +154,6 @@ class DashboardDAO(BaseDAO):
             return not db.session.query(dashboard_query.exists()).scalar()
         return True
 
-    @staticmethod
-    def update_charts_owners(model: Dashboard, commit: bool = True) -> Dashboard:
-        owners = list(model.owners)
-        for slc in model.slices:
-            slc.owners = list(set(owners) | set(slc.owners))
-        if commit:
-            db.session.commit()
-        return model
-
     @staticmethod
     def bulk_delete(models: Optional[List[Dashboard]], commit: bool = True) -> None:
         item_ids = [model.id for model in models] if models else []
diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py
index c9d25b679c..7630d5aad1 100644
--- a/tests/integration_tests/dashboards/api_tests.py
+++ b/tests/integration_tests/dashboards/api_tests.py
@@ -1400,55 +1400,6 @@ class TestDashboardApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixi
         db.session.delete(model)
         db.session.commit()
 
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_update_dashboard_chart_owners(self):
-        """
-        Dashboard API: Test update chart owners
-        """
-        user_alpha1 = self.create_user(
-            "alpha1", "password", "Alpha", email="alpha1@superset.org"
-        )
-        user_alpha2 = self.create_user(
-            "alpha2", "password", "Alpha", email="alpha2@superset.org"
-        )
-        admin = self.get_user("admin")
-        slices = []
-        slices.append(
-            db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first()
-        )
-        slices.append(db.session.query(Slice).filter_by(slice_name="Trends").first())
-        slices.append(db.session.query(Slice).filter_by(slice_name="Boys").first())
-
-        dashboard = self.insert_dashboard(
-            "title1",
-            "slug1",
-            [admin.id],
-            slices=slices,
-        )
-        self.login(username="admin")
-        uri = f"api/v1/dashboard/{dashboard.id}"
-        dashboard_data = {"owners": [user_alpha1.id, user_alpha2.id]}
-        rv = self.client.put(uri, json=dashboard_data)
-        self.assertEqual(rv.status_code, 200)
-
-        # verify slices owners include alpha1 and alpha2 users
-        slices_ids = [slice.id for slice in slices]
-        # Refetch Slices
-        slices = db.session.query(Slice).filter(Slice.id.in_(slices_ids)).all()
-        for slice in slices:
-            self.assertIn(user_alpha1, slice.owners)
-            self.assertIn(user_alpha2, slice.owners)
-            self.assertNotIn(admin, slice.owners)
-            # Revert owners on slice
-            slice.owners = []
-            db.session.commit()
-
-        # Rollback changes
-        db.session.delete(dashboard)
-        db.session.delete(user_alpha1)
-        db.session.delete(user_alpha2)
-        db.session.commit()
-
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_update_dashboard_chart_owners_propagation(self):
         """


(superset) 02/08: fix: DB-specific quoting in Jinja macro (#25779)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 34101594e284ab3acce692f41aff7759ccb4bf1d
Author: Beto Dealmeida <ro...@dealmeida.net>
AuthorDate: Mon Oct 30 09:50:44 2023 -0400

    fix: DB-specific quoting in Jinja macro (#25779)
---
 superset/jinja_context.py              | 45 +++++++++++++++++++++++-----------
 tests/unit_tests/jinja_context_test.py |  9 +++++--
 2 files changed, 38 insertions(+), 16 deletions(-)

diff --git a/superset/jinja_context.py b/superset/jinja_context.py
index d9409e297b..ffbab4f9bc 100644
--- a/superset/jinja_context.py
+++ b/superset/jinja_context.py
@@ -35,6 +35,7 @@ from flask_babel import gettext as _
 from jinja2 import DebugUndefined
 from jinja2.sandbox import SandboxedEnvironment
 from sqlalchemy.engine.interfaces import Dialect
+from sqlalchemy.sql.expression import bindparam
 from sqlalchemy.types import String
 from typing_extensions import TypedDict
 
@@ -407,23 +408,39 @@ def validate_template_context(
     return validate_context_types(context)
 
 
-def where_in(values: List[Any], mark: str = "'") -> str:
-    """
-    Given a list of values, build a parenthesis list suitable for an IN expression.
+class WhereInMacro:  # pylint: disable=too-few-public-methods
+    def __init__(self, dialect: Dialect):
+        self.dialect = dialect
 
-        >>> where_in([1, "b", 3])
-        (1, 'b', 3)
+    def __call__(self, values: List[Any], mark: Optional[str] = None) -> str:
+        """
+        Given a list of values, build a parenthesis list suitable for an IN expression.
 
-    """
+            >>> from sqlalchemy.dialects import mysql
+            >>> where_in = WhereInMacro(dialect=mysql.dialect())
+            >>> where_in([1, "Joe's", 3])
+            (1, 'Joe''s', 3)
 
-    def quote(value: Any) -> str:
-        if isinstance(value, str):
-            value = value.replace(mark, mark * 2)
-            return f"{mark}{value}{mark}"
-        return str(value)
+        """
+        binds = [bindparam(f"value_{i}", value) for i, value in enumerate(values)]
+        string_representations = [
+            str(
+                bind.compile(
+                    dialect=self.dialect, compile_kwargs={"literal_binds": True}
+                )
+            )
+            for bind in binds
+        ]
+        joined_values = ", ".join(string_representations)
+        result = f"({joined_values})"
+
+        if mark:
+            result += (
+                "\n-- WARNING: the `mark` parameter was removed from the `where_in` "
+                "macro for security reasons\n"
+            )
 
-    joined_values = ", ".join(quote(value) for value in values)
-    return f"({joined_values})"
+        return result
 
 
 class BaseTemplateProcessor:
@@ -459,7 +476,7 @@ class BaseTemplateProcessor:
         self.set_context(**kwargs)
 
         # custom filters
-        self._env.filters["where_in"] = where_in
+        self._env.filters["where_in"] = WhereInMacro(database.get_dialect())
 
     def set_context(self, **kwargs: Any) -> None:
         self._context.update(kwargs)
diff --git a/tests/unit_tests/jinja_context_test.py b/tests/unit_tests/jinja_context_test.py
index 13b3ae9e9c..16b1420d08 100644
--- a/tests/unit_tests/jinja_context_test.py
+++ b/tests/unit_tests/jinja_context_test.py
@@ -20,17 +20,22 @@ import json
 
 import pytest
 from pytest_mock import MockFixture
+from sqlalchemy.dialects import mysql
 
 from superset.datasets.commands.exceptions import DatasetNotFoundError
-from superset.jinja_context import dataset_macro, where_in
+from superset.jinja_context import dataset_macro, WhereInMacro
 
 
 def test_where_in() -> None:
     """
     Test the ``where_in`` Jinja2 filter.
     """
+    where_in = WhereInMacro(mysql.dialect())
     assert where_in([1, "b", 3]) == "(1, 'b', 3)"
-    assert where_in([1, "b", 3], '"') == '(1, "b", 3)'
+    assert where_in([1, "b", 3], '"') == (
+        "(1, 'b', 3)\n-- WARNING: the `mark` parameter was removed from the "
+        "`where_in` macro for security reasons\n"
+    )
     assert where_in(["O'Malley's"]) == "('O''Malley''s')"
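
The heart of the new macro is delegating quoting to the SQLAlchemy dialect
instead of hand-rolling string escaping. Below is a minimal standalone sketch
of that mechanism, using the same bindparam()/literal_binds calls as the diff
above; the sample value is an assumption:

    from sqlalchemy.dialects import mysql
    from sqlalchemy.sql.expression import bindparam

    # Compile a bound parameter with literal_binds so the dialect, not the
    # caller, decides how the string is quoted and escaped.
    bind = bindparam("value_0", "O'Malley's")
    literal = bind.compile(
        dialect=mysql.dialect(), compile_kwargs={"literal_binds": True}
    )
    print(literal)  # 'O''Malley''s' -- the embedded quote is doubled

Because the dialect owns the escaping, the user-supplied mark argument no
longer influences quoting at all; passing it merely appends the warning
comment asserted in the updated unit test, which is the security point of
the change.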
 
 


(superset) 05/08: chore: Bump sqlparse to 0.4.4 (#24045)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 056c5985d643a772b2b6cc510d6a34e2c20b1860
Author: EugeneTorap <ev...@gmail.com>
AuthorDate: Tue May 23 10:15:24 2023 +0300

    chore: Bump sqlparse to 0.4.4 (#24045)
    
    Co-authored-by: sebastianliebscher <li...@protonmail.ch>
---
 requirements/base.txt |  2 +-
 setup.py              |  2 +-
 superset/sql_parse.py | 16 ++++++++--------
 3 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/requirements/base.txt b/requirements/base.txt
index 29b2932f2a..ca143b20e6 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -272,7 +272,7 @@ sqlalchemy-utils==0.38.3
     # via
     #   apache-superset
     #   flask-appbuilder
-sqlparse==0.4.3
+sqlparse==0.4.4
     # via apache-superset
 sshtunnel==0.4.0
     # via apache-superset
diff --git a/setup.py b/setup.py
index 05c6ca4ab5..7124e70f75 100644
--- a/setup.py
+++ b/setup.py
@@ -119,7 +119,7 @@ setup(
         "slack_sdk>=3.1.1, <4",
         "sqlalchemy>=1.4, <2",
         "sqlalchemy-utils>=0.38.3, <0.39",
-        "sqlparse>=0.4.3, <0.5",
+        "sqlparse>=0.4.4, <0.5",
         "tabulate>=0.8.9, <0.9",
         "typing-extensions>=4, <5",
         "waitress; sys_platform == 'win32'",
diff --git a/superset/sql_parse.py b/superset/sql_parse.py
index 81aebe0f41..821cd5f229 100644
--- a/superset/sql_parse.py
+++ b/superset/sql_parse.py
@@ -23,6 +23,8 @@ from urllib import parse
 
 import sqlparse
 from sqlalchemy import and_
+from sqlparse import keywords
+from sqlparse.lexer import Lexer
 from sqlparse.sql import (
     Identifier,
     IdentifierList,
@@ -59,15 +61,13 @@ CTE_PREFIX = "CTE__"
 
 logger = logging.getLogger(__name__)
 
-
 # TODO: Workaround for https://github.com/andialbrecht/sqlparse/issues/652.
-sqlparse.keywords.SQL_REGEX.insert(
-    0,
-    (
-        re.compile(r"'(''|\\\\|\\|[^'])*'", sqlparse.keywords.FLAGS).match,
-        sqlparse.tokens.String.Single,
-    ),
-)
+# configure the Lexer to extend sqlparse
+# reference: https://sqlparse.readthedocs.io/en/stable/extending/
+lex = Lexer.get_default_instance()
+sqlparser_sql_regex = keywords.SQL_REGEX
+sqlparser_sql_regex.insert(25, (r"'(''|\\\\|\\|[^'])*'", sqlparse.tokens.String.Single))
+lex.set_SQL_REGEX(sqlparser_sql_regex)
 
 
 class CtasMethod(str, Enum):
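
A short sketch of what the new workaround buys, reusing the same Lexer calls
as the hunk above; the sample statement is an assumption:

    import sqlparse
    from sqlparse import keywords
    from sqlparse.lexer import Lexer

    # Extend the default lexer exactly as the commit does: keep
    # backslash-escaped quotes inside one String.Single token
    # (workaround for sqlparse issue #652).
    lex = Lexer.get_default_instance()
    sql_regex = keywords.SQL_REGEX
    sql_regex.insert(
        25, (r"'(''|\\\\|\\|[^'])*'", sqlparse.tokens.String.Single)
    )
    lex.set_SQL_REGEX(sql_regex)

    parsed = sqlparse.parse(r"SELECT 'it\'s'")[0]
    print([(tok.ttype, tok.value) for tok in parsed.flatten()])

As of sqlparse 0.4.4 the lexer keeps its own compiled copy of the rules, so
mutating sqlparse.keywords.SQL_REGEX alone (the pre-0.4.4 workaround removed
here) no longer takes effect; the rules have to be pushed back in via
set_SQL_REGEX(), as described in the sqlparse extending documentation linked
in the diff.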


(superset) 06/08: chore: Update mypy and fix stubs issue (#24033)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit b76aa5709089a867eb33164a38af40ba27f23203
Author: EugeneTorap <ev...@gmail.com>
AuthorDate: Fri May 12 10:47:14 2023 +0300

    chore: Update mypy and fix stubs issue (#24033)
---
 .pre-commit-config.yaml                            | 17 +++++++++++++++--
 superset/charts/api.py                             |  2 +-
 superset/dashboards/api.py                         |  2 +-
 superset/databases/api.py                          |  8 +++-----
 superset/databases/schemas.py                      |  5 ++---
 superset/datasets/api.py                           |  2 +-
 superset/db_engine_specs/presto.py                 |  6 +++---
 superset/embedded/view.py                          |  2 ++
 superset/exceptions.py                             |  2 +-
 superset/extensions/__init__.py                    |  2 +-
 superset/importexport/api.py                       |  2 +-
 superset/initialization/__init__.py                | 10 +++-------
 superset/jinja_context.py                          |  2 +-
 superset/models/dashboard.py                       |  4 ++--
 superset/queries/saved_queries/api.py              |  2 +-
 superset/result_set.py                             |  6 +++---
 superset/sqllab/api.py                             |  4 ++--
 superset/sqllab/exceptions.py                      | 14 +++++++-------
 superset/sqllab/query_render.py                    |  7 ++-----
 superset/sqllab/sql_json_executer.py               |  3 +--
 superset/tasks/async_queries.py                    |  8 ++++++--
 superset/utils/core.py                             | 22 +++++++++++-----------
 superset/utils/decorators.py                       |  2 +-
 superset/utils/encrypt.py                          |  2 +-
 superset/utils/machine_auth.py                     |  2 +-
 superset/utils/pandas_postprocessing/boxplot.py    |  4 ++--
 superset/utils/pandas_postprocessing/flatten.py    |  2 +-
 superset/utils/url_map_converters.py               |  2 +-
 superset/views/base.py                             |  8 ++++----
 superset/views/base_api.py                         |  5 +----
 superset/views/core.py                             |  6 ++++--
 superset/views/utils.py                            |  4 ++--
 tests/integration_tests/reports/scheduler_tests.py |  2 +-
 tests/unit_tests/tasks/test_cron_util.py           |  2 +-
 34 files changed, 90 insertions(+), 83 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index aa0cf4af62..fe7ca007e9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -20,10 +20,23 @@ repos:
     hooks:
       - id: isort
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.941
+    rev: v1.3.0
     hooks:
       - id: mypy
-        additional_dependencies: [types-all]
+        args: [--check-untyped-defs]
+        additional_dependencies:
+          [
+            types-simplejson,
+            types-python-dateutil,
+            types-requests,
+            types-redis,
+            types-pytz,
+            types-croniter,
+            types-PyYAML,
+            types-setuptools,
+            types-paramiko,
+            types-Markdown,
+          ]
   - repo: https://github.com/peterdemin/pip-compile-multi
     rev: v2.4.1
     hooks:
diff --git a/superset/charts/api.py b/superset/charts/api.py
index 88d74f875e..3f397d1bd7 100644
--- a/superset/charts/api.py
+++ b/superset/charts/api.py
@@ -780,7 +780,7 @@ class ChartRestApi(BaseSupersetModelRestApi):
             buf,
             mimetype="application/zip",
             as_attachment=True,
-            attachment_filename=filename,
+            download_name=filename,
         )
         if token:
             response.set_cookie(token, "done", max_age=600)
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 6f478e22cc..f5a0434c53 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -803,7 +803,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
                 buf,
                 mimetype="application/zip",
                 as_attachment=True,
-                attachment_filename=filename,
+                download_name=filename,
             )
             if token:
                 response.set_cookie(token, "done", max_age=600)
diff --git a/superset/databases/api.py b/superset/databases/api.py
index c285198747..3d4e94153f 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -1054,7 +1054,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
             buf,
             mimetype="application/zip",
             as_attachment=True,
-            attachment_filename=filename,
+            download_name=filename,
         )
         if token:
             response.set_cookie(token, "done", max_age=600)
@@ -1263,12 +1263,10 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
                 and hasattr(engine_spec, "sqlalchemy_uri_placeholder")
                 and getattr(engine_spec, "default_driver") in drivers
             ):
-                payload[
-                    "parameters"
-                ] = engine_spec.parameters_json_schema()  # type: ignore
+                payload["parameters"] = engine_spec.parameters_json_schema()
                 payload[
                     "sqlalchemy_uri_placeholder"
-                ] = engine_spec.sqlalchemy_uri_placeholder  # type: ignore
+                ] = engine_spec.sqlalchemy_uri_placeholder
 
             available_databases.append(payload)
 
diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py
index e318e41121..f06ff0189a 100644
--- a/superset/databases/schemas.py
+++ b/superset/databases/schemas.py
@@ -298,7 +298,7 @@ class DatabaseParametersSchemaMixin:  # pylint: disable=too-few-public-methods
                 )
 
             # validate parameters
-            parameters = engine_spec.parameters_schema.load(parameters)  # type: ignore
+            parameters = engine_spec.parameters_schema.load(parameters)
 
             serialized_encrypted_extra = data.get("masked_encrypted_extra") or "{}"
             try:
@@ -306,7 +306,7 @@ class DatabaseParametersSchemaMixin:  # pylint: disable=too-few-public-methods
             except json.decoder.JSONDecodeError:
                 encrypted_extra = {}
 
-            data["sqlalchemy_uri"] = engine_spec.build_sqlalchemy_uri(  # type: ignore
+            data["sqlalchemy_uri"] = engine_spec.build_sqlalchemy_uri(
                 parameters,
                 encrypted_extra,
             )
@@ -482,7 +482,6 @@ class DatabasePutSchema(Schema, DatabaseParametersSchemaMixin):
 
 
 class DatabaseTestConnectionSchema(Schema, DatabaseParametersSchemaMixin):
-
     rename_encrypted_extra = pre_load(rename_encrypted_extra)
 
     database_name = fields.String(
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index 16975675e6..247e6b5ffd 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -514,7 +514,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
                 buf,
                 mimetype="application/zip",
                 as_attachment=True,
-                attachment_filename=filename,
+                download_name=filename,
             )
             if token:
                 response.set_cookie(token, "done", max_age=600)
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index 87f362acc8..33c848cfd5 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -1272,10 +1272,10 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
     def _extract_error_message(cls, ex: Exception) -> str:
         if (
             hasattr(ex, "orig")
-            and type(ex.orig).__name__ == "DatabaseError"  # type: ignore
-            and isinstance(ex.orig[0], dict)  # type: ignore
+            and type(ex.orig).__name__ == "DatabaseError"
+            and isinstance(ex.orig[0], dict)
         ):
-            error_dict = ex.orig[0]  # type: ignore
+            error_dict = ex.orig[0]
             return "{} at {}: {}".format(
                 error_dict.get("errorName"),
                 error_dict.get("errorLocation"),
diff --git a/superset/embedded/view.py b/superset/embedded/view.py
index 8dd383aada..b7062c0b5e 100644
--- a/superset/embedded/view.py
+++ b/superset/embedded/view.py
@@ -55,6 +55,8 @@ class EmbeddedView(BaseSupersetView):
         if not embedded:
             abort(404)
 
+        assert embedded is not None
+
         # validate request referrer in allowed domains
         is_referrer_allowed = not embedded.allowed_domains
         for domain in embedded.allowed_domains:
diff --git a/superset/exceptions.py b/superset/exceptions.py
index cee15be376..32b06203cd 100644
--- a/superset/exceptions.py
+++ b/superset/exceptions.py
@@ -54,7 +54,7 @@ class SupersetException(Exception):
         if self.error_type:
             rv["error_type"] = self.error_type
         if self.exception is not None and hasattr(self.exception, "to_dict"):
-            rv = {**rv, **self.exception.to_dict()}  # type: ignore
+            rv = {**rv, **self.exception.to_dict()}
         return rv
 
 
diff --git a/superset/extensions/__init__.py b/superset/extensions/__init__.py
index e2e5592e1e..f633385972 100644
--- a/superset/extensions/__init__.py
+++ b/superset/extensions/__init__.py
@@ -107,7 +107,7 @@ class ProfilingExtension:  # pylint: disable=too-few-public-methods
         self.interval = interval
 
     def init_app(self, app: Flask) -> None:
-        app.wsgi_app = SupersetProfiler(app.wsgi_app, self.interval)  # type: ignore
+        app.wsgi_app = SupersetProfiler(app.wsgi_app, self.interval)
 
 
 APP_DIR = os.path.join(os.path.dirname(__file__), os.path.pardir)
diff --git a/superset/importexport/api.py b/superset/importexport/api.py
index 26bc78e5d7..3a159c140e 100644
--- a/superset/importexport/api.py
+++ b/superset/importexport/api.py
@@ -87,7 +87,7 @@ class ImportExportRestApi(BaseSupersetApi):
             buf,
             mimetype="application/zip",
             as_attachment=True,
-            attachment_filename=filename,
+            download_name=filename,
         )
         return response
 
diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py
index ef332e502d..f06dbdb907 100644
--- a/superset/initialization/__init__.py
+++ b/superset/initialization/__init__.py
@@ -564,7 +564,7 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
             CORS(self.superset_app, **self.config["CORS_OPTIONS"])
 
         if self.config["ENABLE_PROXY_FIX"]:
-            self.superset_app.wsgi_app = ProxyFix(  # type: ignore
+            self.superset_app.wsgi_app = ProxyFix(
                 self.superset_app.wsgi_app, **self.config["PROXY_FIX_CONFIG"]
             )
 
@@ -583,9 +583,7 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
                         environ["wsgi.input_terminated"] = True
                     return self.app(environ, start_response)
 
-            self.superset_app.wsgi_app = ChunkedEncodingFix(  # type: ignore
-                self.superset_app.wsgi_app  # type: ignore
-            )
+            self.superset_app.wsgi_app = ChunkedEncodingFix(self.superset_app.wsgi_app)
 
         if self.config["UPLOAD_FOLDER"]:
             try:
@@ -594,9 +592,7 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
                 pass
 
         for middleware in self.config["ADDITIONAL_MIDDLEWARE"]:
-            self.superset_app.wsgi_app = middleware(  # type: ignore
-                self.superset_app.wsgi_app
-            )
+            self.superset_app.wsgi_app = middleware(self.superset_app.wsgi_app)
 
         # Flask-Compress
         Compress(self.superset_app)
diff --git a/superset/jinja_context.py b/superset/jinja_context.py
index ffbab4f9bc..4fcb6c374d 100644
--- a/superset/jinja_context.py
+++ b/superset/jinja_context.py
@@ -190,7 +190,7 @@ class ExtraCache:
         # pylint: disable=import-outside-toplevel
         from superset.views.utils import get_form_data
 
-        if has_request_context() and request.args.get(param):  # type: ignore
+        if has_request_context() and request.args.get(param):
             return request.args.get(param, default)
 
         form_data, _ = get_form_data()
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index e2192ae2f2..3715eb7bc6 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -319,8 +319,8 @@ class Dashboard(Model, AuditMixinNullable, ImportExportMixin):
 
         return result
 
-    @property  # type: ignore
-    def params(self) -> str:  # type: ignore
+    @property
+    def params(self) -> str:
         return self.json_metadata
 
     @params.setter
diff --git a/superset/queries/saved_queries/api.py b/superset/queries/saved_queries/api.py
index 2b70b582bb..5d1fb2d2b2 100644
--- a/superset/queries/saved_queries/api.py
+++ b/superset/queries/saved_queries/api.py
@@ -275,7 +275,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):
             buf,
             mimetype="application/zip",
             as_attachment=True,
-            attachment_filename=filename,
+            download_name=filename,
         )
         if token:
             response.set_cookie(token, "done", max_age=600)
diff --git a/superset/result_set.py b/superset/result_set.py
index 1c4ae98dc9..170de1869c 100644
--- a/superset/result_set.py
+++ b/superset/result_set.py
@@ -70,14 +70,14 @@ def stringify_values(array: NDArray[Any]) -> NDArray[Any]:
         for obj in it:
             if na_obj := pd.isna(obj):
                 # pandas <NA> type cannot be converted to string
-                obj[na_obj] = None  # type: ignore
+                obj[na_obj] = None
             else:
                 try:
                     # for simple string conversions
                     # this handles odd character types better
-                    obj[...] = obj.astype(str)  # type: ignore
+                    obj[...] = obj.astype(str)
                 except ValueError:
-                    obj[...] = stringify(obj)  # type: ignore
+                    obj[...] = stringify(obj)
 
     return result
 
diff --git a/superset/sqllab/api.py b/superset/sqllab/api.py
index 5915601c0d..0f7a5236c5 100644
--- a/superset/sqllab/api.py
+++ b/superset/sqllab/api.py
@@ -280,7 +280,7 @@ class SqlLabRestApi(BaseSupersetApi):
         )
         execution_context_convertor = ExecutionContextConvertor()
         execution_context_convertor.set_max_row_in_display(
-            int(config.get("DISPLAY_MAX_ROW"))  # type: ignore
+            int(config.get("DISPLAY_MAX_ROW"))
         )
         return ExecuteSqlCommand(
             execution_context,
@@ -305,7 +305,7 @@ class SqlLabRestApi(BaseSupersetApi):
             sql_json_executor = SynchronousSqlJsonExecutor(
                 query_dao,
                 get_sql_results,
-                config.get("SQLLAB_TIMEOUT"),  # type: ignore
+                config.get("SQLLAB_TIMEOUT"),
                 is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE"),
             )
         return sql_json_executor
diff --git a/superset/sqllab/exceptions.py b/superset/sqllab/exceptions.py
index c0096d5db6..8a4db21950 100644
--- a/superset/sqllab/exceptions.py
+++ b/superset/sqllab/exceptions.py
@@ -33,7 +33,7 @@ class SqlLabException(SupersetException):
     failed_reason_msg: str
     suggestion_help_msg: Optional[str]
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         sql_json_execution_context: SqlJsonExecutionContext,
         error_type: Optional[SupersetErrorType] = None,
@@ -48,13 +48,13 @@ class SqlLabException(SupersetException):
             if exception is not None:
                 if (
                     hasattr(exception, "error_type")
-                    and exception.error_type is not None  # type: ignore
+                    and exception.error_type is not None
                 ):
-                    error_type = exception.error_type  # type: ignore
+                    error_type = exception.error_type
                 elif hasattr(exception, "error") and isinstance(
-                    exception.error, SupersetError  # type: ignore
+                    exception.error, SupersetError
                 ):
-                    error_type = exception.error.error_type  # type: ignore
+                    error_type = exception.error.error_type
             else:
                 error_type = SupersetErrorType.GENERIC_BACKEND_ERROR
 
@@ -79,9 +79,9 @@ class SqlLabException(SupersetException):
             return ": {}".format(reason_message)
         if exception is not None:
             if hasattr(exception, "get_message"):
-                return ": {}".format(exception.get_message())  # type: ignore
+                return ": {}".format(exception.get_message())
             if hasattr(exception, "message"):
-                return ": {}".format(exception.message)  # type: ignore
+                return ": {}".format(exception.message)
             return ": {}".format(str(exception))
         return ""
 
diff --git a/superset/sqllab/query_render.py b/superset/sqllab/query_render.py
index 2854a7e390..1369e78db1 100644
--- a/superset/sqllab/query_render.py
+++ b/superset/sqllab/query_render.py
@@ -48,8 +48,7 @@ class SqlQueryRenderImpl(SqlQueryRender):
     def __init__(
         self, sql_template_factory: Callable[..., BaseTemplateProcessor]
     ) -> None:
-
-        self._sql_template_processor_factory = sql_template_factory  # type: ignore
+        self._sql_template_processor_factory = sql_template_factory
 
     def render(self, execution_context: SqlJsonExecutionContext) -> str:
         query_model = execution_context.query
@@ -76,9 +75,7 @@ class SqlQueryRenderImpl(SqlQueryRender):
         if is_feature_enabled("ENABLE_TEMPLATE_PROCESSING"):
             # pylint: disable=protected-access
             syntax_tree = sql_template_processor._env.parse(rendered_query)
-            undefined_parameters = find_undeclared_variables(  # type: ignore
-                syntax_tree
-            )
+            undefined_parameters = find_undeclared_variables(syntax_tree)
             if undefined_parameters:
                 self._raise_undefined_parameter_exception(
                     execution_context, undefined_parameters
diff --git a/superset/sqllab/sql_json_executer.py b/superset/sqllab/sql_json_executer.py
index 3d55047b41..e4e6b60654 100644
--- a/superset/sqllab/sql_json_executer.py
+++ b/superset/sqllab/sql_json_executer.py
@@ -64,7 +64,7 @@ class SqlJsonExecutorBase(SqlJsonExecutor, ABC):
 
     def __init__(self, query_dao: QueryDAO, get_sql_results_task: GetSqlResultsTask):
         self._query_dao = query_dao
-        self._get_sql_results_task = get_sql_results_task  # type: ignore
+        self._get_sql_results_task = get_sql_results_task
 
 
 class SynchronousSqlJsonExecutor(SqlJsonExecutorBase):
@@ -163,7 +163,6 @@ class ASynchronousSqlJsonExecutor(SqlJsonExecutorBase):
         rendered_query: str,
         log_params: Optional[Dict[str, Any]],
     ) -> SqlJsonExecutionStatus:
-
         query_id = execution_context.query.id
         logger.info("Query %i: Running query on a Celery worker", query_id)
         try:
diff --git a/superset/tasks/async_queries.py b/superset/tasks/async_queries.py
index 1157c5fd37..8d2b488480 100644
--- a/superset/tasks/async_queries.py
+++ b/superset/tasks/async_queries.py
@@ -90,7 +90,9 @@ def load_chart_data_into_cache(
             raise ex
         except Exception as ex:
             # TODO: QueryContext should support SIP-40 style errors
-            error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
+            error = (
+                ex.message if hasattr(ex, "message") else str(ex)
+            )  # pylint: disable=no-member
             errors = [{"message": error}]
             async_query_manager.update_job(
                 job_metadata, async_query_manager.STATUS_ERROR, errors=errors
@@ -157,7 +159,9 @@ def load_explore_json_into_cache(  # pylint: disable=too-many-locals
             if isinstance(ex, SupersetVizException):
                 errors = ex.errors  # pylint: disable=no-member
             else:
-                error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
+                error = (
+                    ex.message if hasattr(ex, "message") else str(ex)
+                )  # pylint: disable=no-member
                 errors = [error]
 
             async_query_manager.update_job(
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 517ca6e21b..109d6742b1 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -56,6 +56,7 @@ from typing import (
     Iterable,
     Iterator,
     List,
+    Literal,
     NamedTuple,
     Optional,
     Sequence,
@@ -655,10 +656,10 @@ def error_msg_from_exception(ex: Exception) -> str:
     """
     msg = ""
     if hasattr(ex, "message"):
-        if isinstance(ex.message, dict):  # type: ignore
+        if isinstance(ex.message, dict):
             msg = ex.message.get("message")  # type: ignore
-        elif ex.message:  # type: ignore
-            msg = ex.message  # type: ignore
+        elif ex.message:
+            msg = ex.message
     return msg or str(ex)
 
 
@@ -1148,9 +1149,7 @@ def merge_extra_form_data(form_data: Dict[str, Any]) -> None:
     append_adhoc_filters: List[AdhocFilterClause] = extra_form_data.get(
         "adhoc_filters", []
     )
-    adhoc_filters.extend(
-        {"isExtra": True, **fltr} for fltr in append_adhoc_filters  # type: ignore
-    )
+    adhoc_filters.extend({"isExtra": True, **fltr} for fltr in append_adhoc_filters)  # type: ignore
     if append_filters:
         for key, value in form_data.items():
             if re.match("adhoc_filter.*", key):
@@ -1667,7 +1666,7 @@ def get_form_data_token(form_data: Dict[str, Any]) -> str:
     return form_data.get("token") or "token_" + uuid.uuid4().hex[:8]
 
 
-def get_column_name_from_column(column: Column) -> Optional[str]:
+def get_column_name_from_column(column: Column) -> Optional[Column]:
     """
     Extract the physical column that a column is referencing. If the column is
     an adhoc column, always returns `None`.
@@ -1677,10 +1676,12 @@ def get_column_name_from_column(column: Column) -> Optional[str]:
     """
     if is_adhoc_column(column):
         return None
-    return column  # type: ignore
+    return column
 
 
-def get_column_names_from_columns(columns: List[Column]) -> List[str]:
+def get_column_names_from_columns(
+    columns: List[Column],
+) -> List[Union[AdhocColumn, str]]:
     """
     Extract the physical columns that a list of columns are referencing. Ignore
     adhoc columns
@@ -1785,7 +1786,7 @@ def indexed(
     return idx
 
 
-def is_test() -> bool:
+def is_test() -> Literal[0, 1]:
     return strtobool(os.environ.get("SUPERSET_TESTENV", "false"))
 
 
@@ -1793,7 +1794,6 @@ def get_time_filter_status(
     datasource: "BaseDatasource",
     applied_time_extras: Dict[str, str],
 ) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
-
     temporal_columns: Set[Any]
     if datasource.type == "query":
         temporal_columns = {
diff --git a/superset/utils/decorators.py b/superset/utils/decorators.py
index f80a578674..e77a559905 100644
--- a/superset/utils/decorators.py
+++ b/superset/utils/decorators.py
@@ -47,7 +47,7 @@ def statsd_gauge(metric_prefix: Optional[str] = None) -> Callable[..., Any]:
             except Exception as ex:
                 if (
                     hasattr(ex, "status")
-                    and ex.status < 500  # type: ignore # pylint: disable=no-member
+                    and ex.status < 500  # pylint: disable=no-member
                 ):
                     current_app.config["STATS_LOGGER"].gauge(
                         f"{metric_prefix_}.warning", 1
diff --git a/superset/utils/encrypt.py b/superset/utils/encrypt.py
index 0c230c6cd9..52b784bb23 100644
--- a/superset/utils/encrypt.py
+++ b/superset/utils/encrypt.py
@@ -60,7 +60,7 @@ class EncryptedFieldFactory:
 
     def init_app(self, app: Flask) -> None:
         self._config = app.config
-        self._concrete_type_adapter = self._config[
+        self._concrete_type_adapter = self._config[  # type: ignore
             "SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER"
         ]()
 
diff --git a/superset/utils/machine_auth.py b/superset/utils/machine_auth.py
index d58f739f77..b770245137 100644
--- a/superset/utils/machine_auth.py
+++ b/superset/utils/machine_auth.py
@@ -52,7 +52,7 @@ class MachineAuthProvider:
         :return: The WebDriver passed in (fluent)
         """
         # Short-circuit this method if we have an override configured
-        if self._auth_webdriver_func_override:
+        if self._auth_webdriver_func_override is not None:
             return self._auth_webdriver_func_override(driver, user)
 
         # Setting cookies requires doing a request first
diff --git a/superset/utils/pandas_postprocessing/boxplot.py b/superset/utils/pandas_postprocessing/boxplot.py
index d4c78bf15e..399cf569fb 100644
--- a/superset/utils/pandas_postprocessing/boxplot.py
+++ b/superset/utils/pandas_postprocessing/boxplot.py
@@ -99,8 +99,8 @@ def boxplot(
             return np.nanpercentile(series, low)
 
     else:
-        whisker_high = np.max  # type: ignore
-        whisker_low = np.min  # type: ignore
+        whisker_high = np.max
+        whisker_low = np.min
 
     def outliers(series: Series) -> Set[float]:
         above = series[series > whisker_high(series)]
diff --git a/superset/utils/pandas_postprocessing/flatten.py b/superset/utils/pandas_postprocessing/flatten.py
index 1026164e45..db783c4bed 100644
--- a/superset/utils/pandas_postprocessing/flatten.py
+++ b/superset/utils/pandas_postprocessing/flatten.py
@@ -85,7 +85,7 @@ def flatten(
         _columns = []
         for series in df.columns.to_flat_index():
             _cells = []
-            for cell in series if is_sequence(series) else [series]:  # type: ignore
+            for cell in series if is_sequence(series) else [series]:
                 if pd.notnull(cell):
                     # every cell should be converted to string and escape comma
                     _cells.append(escape_separator(str(cell)))
diff --git a/superset/utils/url_map_converters.py b/superset/utils/url_map_converters.py
index c5eaf3b359..fbd9c800b0 100644
--- a/superset/utils/url_map_converters.py
+++ b/superset/utils/url_map_converters.py
@@ -23,7 +23,7 @@ from superset.tags.models import ObjectTypes
 
 class RegexConverter(BaseConverter):
     def __init__(self, url_map: Map, *items: List[str]) -> None:
-        super().__init__(url_map)  # type: ignore
+        super().__init__(url_map)
         self.regex = items[0]
 
 
diff --git a/superset/views/base.py b/superset/views/base.py
index ec74b8ccdb..f4a945484c 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -486,7 +486,7 @@ def show_http_exception(ex: HTTPException) -> FlaskResponse:
         and ex.code in {404, 500}
     ):
         path = resource_filename("superset", f"static/assets/{ex.code}.html")
-        return send_file(path, cache_timeout=0), ex.code
+        return send_file(path, max_age=0), ex.code
 
     return json_errors_response(
         errors=[
@@ -508,7 +508,7 @@ def show_command_errors(ex: CommandException) -> FlaskResponse:
     logger.warning("CommandException", exc_info=True)
     if "text/html" in request.accept_mimetypes and not config["DEBUG"]:
         path = resource_filename("superset", "static/assets/500.html")
-        return send_file(path, cache_timeout=0), 500
+        return send_file(path, max_age=0), 500
 
     extra = ex.normalized_messages() if isinstance(ex, CommandInvalidError) else {}
     return json_errors_response(
@@ -530,7 +530,7 @@ def show_unexpected_exception(ex: Exception) -> FlaskResponse:
     logger.exception(ex)
     if "text/html" in request.accept_mimetypes and not config["DEBUG"]:
         path = resource_filename("superset", "static/assets/500.html")
-        return send_file(path, cache_timeout=0), 500
+        return send_file(path, max_age=0), 500
 
     return json_errors_response(
         errors=[
@@ -729,7 +729,7 @@ def apply_http_headers(response: Response) -> Response:
     """Applies the configuration's http headers to all responses"""
 
     # HTTP_HEADERS is deprecated, this provides backwards compatibility
-    response.headers.extend(  # type: ignore
+    response.headers.extend(
         {**config["OVERRIDE_HTTP_HEADERS"], **config["HTTP_HEADERS"]}
     )
 
diff --git a/superset/views/base_api.py b/superset/views/base_api.py
index 57d7e17367..b6f1644bde 100644
--- a/superset/views/base_api.py
+++ b/superset/views/base_api.py
@@ -112,10 +112,7 @@ def statsd_metrics(f: Callable[..., Any]) -> Callable[..., Any]:
         try:
             duration, response = time_function(f, self, *args, **kwargs)
         except Exception as ex:
-            if (
-                hasattr(ex, "status")
-                and ex.status < 500  # type: ignore # pylint: disable=no-member
-            ):
+            if hasattr(ex, "status") and ex.status < 500:  # pylint: disable=no-member
                 self.incr_stats("warning", func_name)
             else:
                 self.incr_stats("error", func_name)
diff --git a/superset/views/core.py b/superset/views/core.py
index b1580756b7..7f6364bb8a 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -1844,6 +1844,8 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
         if not dashboard:
             abort(404)
 
+        assert dashboard is not None
+
         has_access_ = False
         for datasource in dashboard.datasources:
             datasource = DatasourceDAO.get_datasource(
@@ -2367,7 +2369,7 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
         )
         execution_context_convertor = ExecutionContextConvertor()
         execution_context_convertor.set_max_row_in_display(
-            int(config.get("DISPLAY_MAX_ROW"))  # type: ignore
+            int(config.get("DISPLAY_MAX_ROW"))
         )
         return ExecuteSqlCommand(
             execution_context,
@@ -2392,7 +2394,7 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
             sql_json_executor = SynchronousSqlJsonExecutor(
                 query_dao,
                 get_sql_results,
-                config.get("SQLLAB_TIMEOUT"),  # type: ignore
+                config.get("SQLLAB_TIMEOUT"),
                 is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE"),
             )
         return sql_json_executor
diff --git a/superset/views/utils.py b/superset/views/utils.py
index cd84d8e0a5..35a39fdc9c 100644
--- a/superset/views/utils.py
+++ b/superset/views/utils.py
@@ -153,7 +153,7 @@ def get_form_data(  # pylint: disable=too-many-locals
 ) -> Tuple[Dict[str, Any], Optional[Slice]]:
     form_data: Dict[str, Any] = initial_form_data or {}
 
-    if has_request_context():  # type: ignore
+    if has_request_context():
         # chart data API requests are JSON
         request_json_data = (
             request.json["queries"][0]
@@ -186,7 +186,7 @@ def get_form_data(  # pylint: disable=too-many-locals
         json_data = form_data["queries"][0] if "queries" in form_data else {}
         form_data.update(json_data)
 
-    if has_request_context():  # type: ignore
+    if has_request_context():
         url_id = request.args.get("r")
         if url_id:
             saved_url = db.session.query(models.Url).filter_by(id=url_id).first()
diff --git a/tests/integration_tests/reports/scheduler_tests.py b/tests/integration_tests/reports/scheduler_tests.py
index 3dd6e72941..d121e87d45 100644
--- a/tests/integration_tests/reports/scheduler_tests.py
+++ b/tests/integration_tests/reports/scheduler_tests.py
@@ -22,7 +22,7 @@ from unittest.mock import patch
 import pytest
 from flask_appbuilder.security.sqla.models import User
 from freezegun import freeze_time
-from freezegun.api import FakeDatetime  # type: ignore
+from freezegun.api import FakeDatetime
 
 from superset.extensions import db
 from superset.reports.models import ReportScheduleType
diff --git a/tests/unit_tests/tasks/test_cron_util.py b/tests/unit_tests/tasks/test_cron_util.py
index d0f9ae2170..282dc99860 100644
--- a/tests/unit_tests/tasks/test_cron_util.py
+++ b/tests/unit_tests/tasks/test_cron_util.py
@@ -21,7 +21,7 @@ import pytest
 import pytz
 from dateutil import parser
 from freezegun import freeze_time
-from freezegun.api import FakeDatetime  # type: ignore
+from freezegun.api import FakeDatetime
 
 from superset.tasks.cron_util import cron_schedule_window
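
Most of the churn in this commit is deleting "# type: ignore" comments that
the newer mypy no longer needs, but the send_file() hunks also track a real
Flask API rename: attachment_filename= became download_name= and
cache_timeout= became max_age= in Flask 2.x. A minimal sketch of the new
spelling follows; the endpoint and payload are assumptions, not code from
this commit:

    from io import BytesIO

    from flask import Flask, send_file

    app = Flask(__name__)

    @app.route("/export")  # hypothetical endpoint, for illustration only
    def export():
        buf = BytesIO(b"...")  # placeholder payload
        return send_file(
            buf,
            mimetype="application/zip",
            as_attachment=True,
            download_name="export.zip",  # formerly attachment_filename=
            max_age=0,                   # formerly cache_timeout=
        )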
 


(superset) 07/08: chore: bump werkzeug and Flask (#23965)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 1111d00e04d14e721e9da2c41822ba23573ab3ad
Author: Daniel Vaz Gaspar <da...@gmail.com>
AuthorDate: Fri May 12 15:01:30 2023 +0100

    chore: bump werkzeug and Flask (#23965)
---
 requirements/base.txt                              | 15 ++--
 requirements/docker.txt                            |  6 +-
 requirements/integration.txt                       |  2 +-
 setup.py                                           |  3 +-
 tests/integration_tests/async_events/api_tests.py  |  4 ++
 tests/integration_tests/charts/data/api_tests.py   | 18 +++--
 tests/integration_tests/core_tests.py              |  2 +
 .../dashboards/permalink/api_tests.py              |  6 +-
 tests/integration_tests/datasource_tests.py        |  2 +-
 .../db_engine_specs/base_engine_spec_tests.py      | 82 ++++++++++++----------
 .../explore/permalink/api_tests.py                 |  8 +--
 tests/integration_tests/reports/commands_tests.py  | 12 ++--
 .../security/analytics_db_safety_tests.py          | 12 ++--
 .../integration_tests/tasks/async_queries_tests.py |  6 ++
 tests/integration_tests/utils/core_tests.py        |  4 +-
 15 files changed, 107 insertions(+), 75 deletions(-)

diff --git a/requirements/base.txt b/requirements/base.txt
index ca143b20e6..1ae306e8f3 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -35,7 +35,7 @@ cffi==1.15.1
     # via
     #   cryptography
     #   pynacl
-click==8.0.4
+click==8.1.3
     # via
     #   apache-superset
     #   celery
@@ -70,7 +70,7 @@ dnspython==2.1.0
     # via email-validator
 email-validator==1.1.3
     # via flask-appbuilder
-flask==2.1.3
+flask==2.2.5
     # via
     #   apache-superset
     #   flask-appbuilder
@@ -114,8 +114,11 @@ geographiclib==1.52
     # via geopy
 geopy==2.2.0
     # via apache-superset
+<<<<<<< HEAD
 graphlib-backport==1.0.3
     # via apache-superset
+=======
+>>>>>>> 97482c57b... chore: bump werkzeug and Flask (#23965)
 gunicorn==20.1.0
     # via apache-superset
 hashids==1.3.1
@@ -134,11 +137,11 @@ importlib-resources==5.12.0
     # via limits
 isodate==0.6.0
     # via apache-superset
-itsdangerous==2.1.1
+itsdangerous==2.1.2
     # via
     #   flask
     #   flask-wtf
-jinja2==3.0.3
+jinja2==3.1.2
     # via
     #   flask
     #   flask-babel
@@ -158,6 +161,7 @@ markupsafe==2.1.1
     # via
     #   jinja2
     #   mako
+    #   werkzeug
     #   wtforms
 marshmallow==3.13.0
     # via
@@ -295,8 +299,9 @@ wcwidth==0.2.5
     # via prompt-toolkit
 webencodings==0.5.1
     # via bleach
-werkzeug==2.1.2
+werkzeug==2.3.3
     # via
+    #   apache-superset
     #   flask
     #   flask-jwt-extended
     #   flask-login
diff --git a/requirements/docker.txt b/requirements/docker.txt
index 0338f43fd8..1b122d50fc 100644
--- a/requirements/docker.txt
+++ b/requirements/docker.txt
@@ -12,8 +12,10 @@
     #   -r requirements/docker.in
 gevent==21.8.0
     # via -r requirements/docker.in
-greenlet==1.1.3.post0
-    # via gevent
+greenlet==2.0.2
+    # via
+    #   -r requirements/docker.in
+    #   gevent
 psycopg2-binary==2.9.5
     # via apache-superset
 zope-event==4.5.0
diff --git a/requirements/integration.txt b/requirements/integration.txt
index a0243d6d0d..7da1e7432a 100644
--- a/requirements/integration.txt
+++ b/requirements/integration.txt
@@ -9,7 +9,7 @@ build==0.8.0
     # via pip-tools
 cfgv==3.3.0
     # via pre-commit
-click==8.0.4
+click==8.1.3
     # via
     #   pip-compile-multi
     #   pip-tools
diff --git a/setup.py b/setup.py
index 7124e70f75..d7d028ba93 100644
--- a/setup.py
+++ b/setup.py
@@ -82,7 +82,7 @@ setup(
         "cron-descriptor",
         "cryptography>=39.0.0,<40",
         "deprecation>=2.1.0, <2.2.0",
-        "flask>=2.1.3, <2.2",
+        "flask>=2.2.5, <3.0.0",
         "flask-appbuilder>=4.3.0, <5.0.0",
         "flask-caching>=1.10.1, <1.11",
         "flask-compress>=1.13, <2.0",
@@ -123,6 +123,7 @@ setup(
         "tabulate>=0.8.9, <0.9",
         "typing-extensions>=4, <5",
         "waitress; sys_platform == 'win32'",
+        "werkzeug>=2.3.3, <3",
         "wtforms>=2.3.3, <4",
         "wtforms-json",
         "xlsxwriter>=3.0.7, <3.1",
diff --git a/tests/integration_tests/async_events/api_tests.py b/tests/integration_tests/async_events/api_tests.py
index a63f540dd0..5c12b29af4 100644
--- a/tests/integration_tests/async_events/api_tests.py
+++ b/tests/integration_tests/async_events/api_tests.py
@@ -33,6 +33,7 @@ class TestAsyncEventApi(SupersetTestCase):
 
     @mock.patch("uuid.uuid4", return_value=UUID)
     def test_events(self, mock_uuid4):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         self.login(username="admin")
         with mock.patch.object(async_query_manager._redis, "xrange") as mock_xrange:
@@ -46,6 +47,7 @@ class TestAsyncEventApi(SupersetTestCase):
 
     @mock.patch("uuid.uuid4", return_value=UUID)
     def test_events_last_id(self, mock_uuid4):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         self.login(username="admin")
         with mock.patch.object(async_query_manager._redis, "xrange") as mock_xrange:
@@ -59,6 +61,7 @@ class TestAsyncEventApi(SupersetTestCase):
 
     @mock.patch("uuid.uuid4", return_value=UUID)
     def test_events_results(self, mock_uuid4):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         self.login(username="admin")
         with mock.patch.object(async_query_manager._redis, "xrange") as mock_xrange:
@@ -107,6 +110,7 @@ class TestAsyncEventApi(SupersetTestCase):
         self.assertEqual(response, expected)
 
     def test_events_no_login(self):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         rv = self.fetch_events()
         assert rv.status_code == 401
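
The repeated `app._got_first_request = False` lines in these tests work around a Flask 2.2 change: setup methods such as before_request/after_request may no longer be registered once the app has handled a request, and async_query_manager.init_app(app) registers such hooks. Resetting the private flag lets each test call init_app again. A minimal sketch of the check being bypassed, assuming Flask >= 2.2 (note `_got_first_request` is a private attribute and may change):

    from flask import Flask

    app = Flask(__name__)
    app.test_client().get("/")  # even a 404 counts as the first request

    try:
        @app.before_request  # any setup method now raises
        def noop() -> None:
            pass
    except AssertionError as exc:
        print(exc)  # "... can no longer be called on the application ..."

    app._got_first_request = False  # what these tests do before init_app(app)

    @app.before_request  # registration succeeds again
    def noop2() -> None:
        pass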
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index 83fb7281fb..891578a3f8 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -28,10 +28,7 @@ from zipfile import ZipFile
 from flask import Response
 from tests.integration_tests.conftest import with_feature_flags
 from superset.models.sql_lab import Query
-from tests.integration_tests.base_tests import (
-    SupersetTestCase,
-    test_client,
-)
+from tests.integration_tests.base_tests import SupersetTestCase, test_client
 from tests.integration_tests.annotation_layers.fixtures import create_annotation_layers
 from tests.integration_tests.fixtures.birth_names_dashboard import (
     load_birth_names_dashboard_with_slices,
@@ -602,6 +599,7 @@ class TestPostChartDataApi(BaseTestChartDataApi):
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_chart_data_async(self):
         self.logout()
+        app._got_first_request = False
         async_query_manager.init_app(app)
         self.login("admin")
         rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data")
@@ -619,6 +617,7 @@ class TestPostChartDataApi(BaseTestChartDataApi):
         Chart data API: Test chart data query returns results synchronously
         when results are already cached.
         """
+        app._got_first_request = False
         async_query_manager.init_app(app)
 
         class QueryContext:
@@ -648,6 +647,7 @@ class TestPostChartDataApi(BaseTestChartDataApi):
         """
         Chart data API: Test chart data query non-JSON format (async)
         """
+        app._got_first_request = False
         async_query_manager.init_app(app)
         self.query_context_payload["result_type"] = "results"
         rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data")
@@ -659,6 +659,7 @@ class TestPostChartDataApi(BaseTestChartDataApi):
         """
         Chart data API: Test chart data query (async)
         """
+        app._got_first_request = False
         async_query_manager.init_app(app)
         test_client.set_cookie(
             "localhost", app.config["GLOBAL_ASYNC_QUERIES_JWT_COOKIE_NAME"], "foo"
@@ -974,6 +975,7 @@ class TestGetChartDataApi(BaseTestChartDataApi):
         """
         Chart data cache API: Test chart data async cache request
         """
+        app._got_first_request = False
         async_query_manager.init_app(app)
         cache_loader.load.return_value = self.query_context_payload
         orig_run = ChartDataCommand.run
@@ -1000,6 +1002,7 @@ class TestGetChartDataApi(BaseTestChartDataApi):
         """
         Chart data cache API: Test chart data async cache request with run failure
         """
+        app._got_first_request = False
         async_query_manager.init_app(app)
         cache_loader.load.return_value = self.query_context_payload
         rv = self.get_assert_metric(
@@ -1017,8 +1020,9 @@ class TestGetChartDataApi(BaseTestChartDataApi):
         """
         Chart data cache API: Test chart data async cache request (no login)
         """
-        self.logout()
+        app._got_first_request = False
         async_query_manager.init_app(app)
+        self.logout()
         cache_loader.load.return_value = self.query_context_payload
         orig_run = ChartDataCommand.run
 
@@ -1039,6 +1043,7 @@ class TestGetChartDataApi(BaseTestChartDataApi):
         """
         Chart data cache API: Test chart data async cache request with invalid cache key
         """
+        app._got_first_request = False
         async_query_manager.init_app(app)
         rv = self.get_assert_metric(
             f"{CHART_DATA_URI}/test-cache-key", "data_from_cache"
@@ -1156,10 +1161,10 @@ def test_data_cache_default_timeout(
 
 
 def test_chart_cache_timeout(
+    load_energy_table_with_slice: List[Slice],
     test_client,
     login_as_admin,
     physical_query_context,
-    load_energy_table_with_slice: List[Slice],
 ):
     # should override datasource cache timeout
 
@@ -1178,7 +1183,6 @@ def test_chart_cache_timeout(
     db.session.commit()
 
     physical_query_context["form_data"] = {"slice_id": slice_with_cache_timeout.id}
-
     rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
     assert rv.json["result"][0]["cache_timeout"] == 20
 
diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py
index f036f18bf6..2e9e287620 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -1088,6 +1088,7 @@ class TestCore(SupersetTestCase):
             "groupby": ["gender"],
             "row_limit": 100,
         }
+        app._got_first_request = False
         async_query_manager.init_app(app)
         self.login(username="admin")
         rv = self.client.post(
@@ -1119,6 +1120,7 @@ class TestCore(SupersetTestCase):
             "groupby": ["gender"],
             "row_limit": 100,
         }
+        app._got_first_request = False
         async_query_manager.init_app(app)
         self.login(username="admin")
         rv = self.client.post(
diff --git a/tests/integration_tests/dashboards/permalink/api_tests.py b/tests/integration_tests/dashboards/permalink/api_tests.py
index 40a312ef85..ad19d13cd4 100644
--- a/tests/integration_tests/dashboards/permalink/api_tests.py
+++ b/tests/integration_tests/dashboards/permalink/api_tests.py
@@ -66,7 +66,7 @@ def permalink_salt() -> Iterator[str]:
 
 
 def test_post(
-    test_client, login_as_admin, dashboard_id: int, permalink_salt: str
+    dashboard_id: int, permalink_salt: str, test_client, login_as_admin
 ) -> None:
     resp = test_client.post(f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE)
     assert resp.status_code == 201
@@ -93,14 +93,14 @@ def test_post_access_denied(test_client, login_as, dashboard_id: int):
     assert resp.status_code == 404
 
 
-def test_post_invalid_schema(test_client, login_as_admin, dashboard_id: int):
+def test_post_invalid_schema(dashboard_id: int, test_client, login_as_admin):
     resp = test_client.post(
         f"api/v1/dashboard/{dashboard_id}/permalink", json={"foo": "bar"}
     )
     assert resp.status_code == 400
 
 
-def test_get(test_client, login_as_admin, dashboard_id: int, permalink_salt: str):
+def test_get(dashboard_id: int, permalink_salt: str, test_client, login_as_admin):
     key = test_client.post(
         f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE
     ).json["key"]
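
The signature reshuffles above (data fixtures such as dashboard_id and permalink_salt moved ahead of test_client and login_as_admin) rely on pytest instantiating same-scoped fixtures in the order they appear in the test signature, and finalizing them in reverse; presumably the fixtures that touch the database must now be set up, and torn down, outside the logged-in client's lifetime. A tiny self-contained illustration of the ordering rule (fixture names are hypothetical):

    import pytest

    calls = []

    @pytest.fixture
    def data():
        calls.append("data")
        yield

    @pytest.fixture
    def client():
        calls.append("client")
        yield

    def test_order(data, client):  # data fixture listed first, as in the diff above
        assert calls == ["data", "client"]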
diff --git a/tests/integration_tests/datasource_tests.py b/tests/integration_tests/datasource_tests.py
index 52bd9ec244..2e42c32c8b 100644
--- a/tests/integration_tests/datasource_tests.py
+++ b/tests/integration_tests/datasource_tests.py
@@ -543,7 +543,7 @@ def test_get_samples_with_filters(test_client, login_as_admin, virtual_dataset):
         f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
     )
     rv = test_client.post(uri, json=None)
-    assert rv.status_code == 400
+    assert rv.status_code == 415
 
     rv = test_client.post(uri, json={})
     assert rv.status_code == 200
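
The 400 -> 415 change tracks newer Werkzeug behavior: request.get_json() now fails with UnsupportedMediaType (HTTP 415) instead of BadRequest (400) when the body is not JSON. A small sketch of the behavior this assertion now expects, assuming the Werkzeug version pinned above:

    from flask import Flask, request

    app = Flask(__name__)

    @app.post("/samples")
    def samples():
        payload = request.json  # non-JSON bodies abort with 415 here
        return {"ok": payload is not None}

    client = app.test_client()
    print(client.post("/samples", data="plain text").status_code)  # 415
    print(client.post("/samples", json={}).status_code)            # 200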
diff --git a/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py b/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py
index 6048ac8f19..2650ca1b6d 100644
--- a/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py
+++ b/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py
@@ -447,17 +447,19 @@ def test_validate_parameters_missing():
             "query": {},
         }
     }
-    errors = BasicParametersMixin.validate_parameters(properties)
-    assert errors == [
-        SupersetError(
-            message=(
-                "One or more parameters are missing: " "database, host, port, username"
+    with app.app_context():
+        errors = BasicParametersMixin.validate_parameters(properties)
+        assert errors == [
+            SupersetError(
+                message=(
+                    "One or more parameters are missing: "
+                    "database, host, port, username"
+                ),
+                error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
+                level=ErrorLevel.WARNING,
+                extra={"missing": ["database", "host", "port", "username"]},
             ),
-            error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
-            level=ErrorLevel.WARNING,
-            extra={"missing": ["database", "host", "port", "username"]},
-        ),
-    ]
+        ]
 
 
 @mock.patch("superset.db_engine_specs.base.is_hostname_valid")
@@ -474,21 +476,22 @@ def test_validate_parameters_invalid_host(is_hostname_valid):
             "query": {"sslmode": "verify-full"},
         }
     }
-    errors = BasicParametersMixin.validate_parameters(properties)
-    assert errors == [
-        SupersetError(
-            message="One or more parameters are missing: port",
-            error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
-            level=ErrorLevel.WARNING,
-            extra={"missing": ["port"]},
-        ),
-        SupersetError(
-            message="The hostname provided can't be resolved.",
-            error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR,
-            level=ErrorLevel.ERROR,
-            extra={"invalid": ["host"]},
-        ),
-    ]
+    with app.app_context():
+        errors = BasicParametersMixin.validate_parameters(properties)
+        assert errors == [
+            SupersetError(
+                message="One or more parameters are missing: port",
+                error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
+                level=ErrorLevel.WARNING,
+                extra={"missing": ["port"]},
+            ),
+            SupersetError(
+                message="The hostname provided can't be resolved.",
+                error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR,
+                level=ErrorLevel.ERROR,
+                extra={"invalid": ["host"]},
+            ),
+        ]
 
 
 @mock.patch("superset.db_engine_specs.base.is_hostname_valid")
@@ -507,17 +510,18 @@ def test_validate_parameters_port_closed(is_port_open, is_hostname_valid):
             "query": {"sslmode": "verify-full"},
         }
     }
-    errors = BasicParametersMixin.validate_parameters(properties)
-    assert errors == [
-        SupersetError(
-            message="The port is closed.",
-            error_type=SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR,
-            level=ErrorLevel.ERROR,
-            extra={
-                "invalid": ["port"],
-                "issue_codes": [
-                    {"code": 1008, "message": "Issue 1008 - The port is closed."}
-                ],
-            },
-        )
-    ]
+    with app.app_context():
+        errors = BasicParametersMixin.validate_parameters(properties)
+        assert errors == [
+            SupersetError(
+                message="The port is closed.",
+                error_type=SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR,
+                level=ErrorLevel.ERROR,
+                extra={
+                    "invalid": ["port"],
+                    "issue_codes": [
+                        {"code": 1008, "message": "Issue 1008 - The port is closed."}
+                    ],
+                },
+            )
+        ]
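
Wrapping these assertions in `with app.app_context():` is needed because the newer Flask only resolves application-bound state (current_app, config, the translated error messages) inside an application context. The same effect is commonly packaged as a pytest fixture; a minimal sketch, reusing the test app imported elsewhere in this series:

    import pytest

    from tests.integration_tests.test_app import app

    @pytest.fixture
    def app_context():
        with app.app_context():
            yield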
diff --git a/tests/integration_tests/explore/permalink/api_tests.py b/tests/integration_tests/explore/permalink/api_tests.py
index 3a07bd977a..b9b1bfd0fb 100644
--- a/tests/integration_tests/explore/permalink/api_tests.py
+++ b/tests/integration_tests/explore/permalink/api_tests.py
@@ -68,7 +68,7 @@ def permalink_salt() -> Iterator[str]:
 
 
 def test_post(
-    test_client, login_as_admin, form_data: Dict[str, Any], permalink_salt: str
+    form_data: Dict[str, Any], permalink_salt: str, test_client, login_as_admin
 ):
     resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data})
     assert resp.status_code == 201
@@ -81,14 +81,14 @@ def test_post(
     db.session.commit()
 
 
-def test_post_access_denied(test_client, login_as, form_data):
+def test_post_access_denied(form_data, test_client, login_as):
     login_as("gamma")
     resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data})
     assert resp.status_code == 403
 
 
 def test_get_missing_chart(
-    test_client, login_as_admin, chart, permalink_salt: str
+    chart, permalink_salt: str, test_client, login_as_admin
 ) -> None:
     from superset.key_value.models import KeyValueEntry
 
@@ -125,7 +125,7 @@ def test_post_invalid_schema(test_client, login_as_admin) -> None:
 
 
 def test_get(
-    test_client, login_as_admin, form_data: Dict[str, Any], permalink_salt: str
+    form_data: Dict[str, Any], permalink_salt: str, test_client, login_as_admin
 ) -> None:
     resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data})
     data = json.loads(resp.data.decode("utf-8"))
diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py
index 8d6a76c14f..a6720d9c46 100644
--- a/tests/integration_tests/reports/commands_tests.py
+++ b/tests/integration_tests/reports/commands_tests.py
@@ -663,7 +663,7 @@ def test_email_chart_report_schedule(
         )
         # assert that the link sent is correct
         assert (
-            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22%3A+'
+            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22:+'
             f"{create_report_email_chart.chart.id}"
             '%7D&force=false">Explore in Superset</a>' in email_mock.call_args[0][2]
         )
@@ -718,7 +718,7 @@ def test_email_chart_report_schedule_alpha_owner(
 
         # assert that the link sent is correct
         assert (
-            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22%3A+'
+            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22:+'
             f"{create_report_email_chart_alpha_owner.chart.id}"
             '%7D&force=false">Explore in Superset</a>' in email_mock.call_args[0][2]
         )
@@ -763,7 +763,7 @@ def test_email_chart_report_schedule_force_screenshot(
         )
         # assert that the link sent is correct
         assert (
-            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22%3A+'
+            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22:+'
             f"{create_report_email_chart_force_screenshot.chart.id}"
             '%7D&force=true">Explore in Superset</a>' in email_mock.call_args[0][2]
         )
@@ -800,7 +800,7 @@ def test_email_chart_alert_schedule(
         notification_targets = get_target_from_report_schedule(create_alert_email_chart)
         # assert that the link sent is correct
         assert (
-            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22%3A+'
+            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22:+'
             f"{create_alert_email_chart.chart.id}"
             '%7D&force=true">Explore in Superset</a>' in email_mock.call_args[0][2]
         )
@@ -872,7 +872,7 @@ def test_email_chart_report_schedule_with_csv(
         )
         # assert that the link sent is correct
         assert (
-            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22%3A+'
+            '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22:+'
             f"{create_report_email_chart_with_csv.chart.id}%7D&"
             'force=false">Explore in Superset</a>' in email_mock.call_args[0][2]
         )
@@ -1303,7 +1303,7 @@ def test_slack_chart_report_schedule_with_text(
 |  1 | c21  | c22  | c23       |"""
         assert table_markdown in post_message_mock.call_args[1]["text"]
         assert (
-            f"<http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22%3A+{create_report_slack_chart_with_text.chart.id}%7D&force=false|Explore in Superset>"
+            f"<http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22:+{create_report_slack_chart_with_text.chart.id}%7D&force=false|Explore in Superset>"
             in post_message_mock.call_args[1]["text"]
         )
 
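
Every assertion update in this file makes the same substitution: %22%3A+ becomes %22:+, i.e. the colon inside the serialized form_data is no longer percent-encoded. This matches the stricter RFC 3986-style quoting in newer Werkzeug, which leaves ":" literal in the query component. The difference, sketched with the standard library (illustrative only; Superset builds these URLs through Werkzeug, which also encodes spaces as "+"):

    from urllib.parse import quote

    form_data = '{"slice_id": 123}'
    print(quote(form_data, safe=""))   # %7B%22slice_id%22%3A%20123%7D  (colon encoded, old)
    print(quote(form_data, safe=":"))  # %7B%22slice_id%22:%20123%7D    (colon literal, new)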
diff --git a/tests/integration_tests/security/analytics_db_safety_tests.py b/tests/integration_tests/security/analytics_db_safety_tests.py
index 7e36268e30..9c40050c0a 100644
--- a/tests/integration_tests/security/analytics_db_safety_tests.py
+++ b/tests/integration_tests/security/analytics_db_safety_tests.py
@@ -21,6 +21,7 @@ from sqlalchemy.engine.url import make_url
 
 from superset.exceptions import SupersetSecurityException
 from superset.security.analytics_db_safety import check_sqlalchemy_uri
+from tests.integration_tests.test_app import app
 
 
 @pytest.mark.parametrize(
@@ -83,9 +84,10 @@ from superset.security.analytics_db_safety import check_sqlalchemy_uri
 def test_check_sqlalchemy_uri(
     sqlalchemy_uri: str, error: bool, error_message: Optional[str]
 ):
-    if error:
-        with pytest.raises(SupersetSecurityException) as excinfo:
+    with app.app_context():
+        if error:
+            with pytest.raises(SupersetSecurityException) as excinfo:
+                check_sqlalchemy_uri(make_url(sqlalchemy_uri))
+                assert str(excinfo.value) == error_message
+        else:
             check_sqlalchemy_uri(make_url(sqlalchemy_uri))
-            assert str(excinfo.value) == error_message
-    else:
-        check_sqlalchemy_uri(make_url(sqlalchemy_uri))
diff --git a/tests/integration_tests/tasks/async_queries_tests.py b/tests/integration_tests/tasks/async_queries_tests.py
index 20d0f39eea..6beda4a224 100644
--- a/tests/integration_tests/tasks/async_queries_tests.py
+++ b/tests/integration_tests/tasks/async_queries_tests.py
@@ -46,6 +46,7 @@ class TestAsyncQueries(SupersetTestCase):
     @mock.patch.object(async_query_manager, "update_job")
     @mock.patch.object(async_queries, "set_form_data")
     def test_load_chart_data_into_cache(self, mock_set_form_data, mock_update_job):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         query_context = get_query_context("birth_names")
         user = security_manager.find_user("gamma")
@@ -68,6 +69,7 @@ class TestAsyncQueries(SupersetTestCase):
     )
     @mock.patch.object(async_query_manager, "update_job")
     def test_load_chart_data_into_cache_error(self, mock_update_job, mock_run_command):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         query_context = get_query_context("birth_names")
         user = security_manager.find_user("gamma")
@@ -90,6 +92,7 @@ class TestAsyncQueries(SupersetTestCase):
     def test_soft_timeout_load_chart_data_into_cache(
         self, mock_update_job, mock_run_command
     ):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         user = security_manager.find_user("gamma")
         form_data = {}
@@ -114,6 +117,7 @@ class TestAsyncQueries(SupersetTestCase):
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     @mock.patch.object(async_query_manager, "update_job")
     def test_load_explore_json_into_cache(self, mock_update_job):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         table = self.get_table(name="birth_names")
         user = security_manager.find_user("gamma")
@@ -145,6 +149,7 @@ class TestAsyncQueries(SupersetTestCase):
     def test_load_explore_json_into_cache_error(
         self, mock_set_form_data, mock_update_job
     ):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         user = security_manager.find_user("gamma")
         form_data = {}
@@ -168,6 +173,7 @@ class TestAsyncQueries(SupersetTestCase):
     def test_soft_timeout_load_explore_json_into_cache(
         self, mock_update_job, mock_run_command
     ):
+        app._got_first_request = False
         async_query_manager.init_app(app)
         user = security_manager.find_user("gamma")
         form_data = {}
diff --git a/tests/integration_tests/utils/core_tests.py b/tests/integration_tests/utils/core_tests.py
index 29b94d6d37..1a2fa6a521 100644
--- a/tests/integration_tests/utils/core_tests.py
+++ b/tests/integration_tests/utils/core_tests.py
@@ -17,6 +17,7 @@
 import pytest
 
 from superset.utils.core import form_data_to_adhoc, simple_filter_to_adhoc
+from tests.integration_tests.test_app import app
 
 
 def test_simple_filter_to_adhoc_generates_deterministic_values():
@@ -81,4 +82,5 @@ def test_form_data_to_adhoc_incorrect_clause_type():
     form_data = {"where": "1 = 1", "having": "count(*) > 1"}
 
     with pytest.raises(ValueError):
-        form_data_to_adhoc(form_data, "foobar")
+        with app.app_context():
+            form_data_to_adhoc(form_data, "foobar")


(superset) 08/08: chore: Use nh3 lib instead of bleach (#23862)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 52cdd57dd990e450fc9062ca65d485a3140d66a5
Author: EugeneTorap <ev...@gmail.com>
AuthorDate: Fri Apr 28 16:36:51 2023 +0300

    chore: Use nh3 lib instead of bleach (#23862)
---
 requirements/base.txt                              |  8 ++-----
 setup.cfg                                          |  2 +-
 setup.py                                           |  2 +-
 .../dashboard/components/gridComponents/Chart.jsx  |  4 ++--
 superset/reports/notifications/email.py            | 27 ++++++++++++----------
 superset/utils/async_query_manager.py              |  4 ++--
 superset/utils/core.py                             | 13 ++++++-----
 tests/unit_tests/notifications/email_tests.py      |  5 +++-
 8 files changed, 34 insertions(+), 31 deletions(-)

diff --git a/requirements/base.txt b/requirements/base.txt
index 1ae306e8f3..a8f7e0c18b 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -23,8 +23,6 @@ bcrypt==4.0.1
     # via paramiko
 billiard==3.6.4.0
     # via celery
-bleach==3.3.1
-    # via apache-superset
 brotli==1.0.9
     # via flask-compress
 cachelib==0.4.1
@@ -174,6 +172,8 @@ marshmallow-sqlalchemy==0.23.1
     # via flask-appbuilder
 msgpack==1.0.2
     # via apache-superset
+nh3==0.2.11
+    # via apache-superset
 numpy==1.23.5
     # via
     #   apache-superset
@@ -181,7 +181,6 @@ numpy==1.23.5
     #   pyarrow
 packaging==21.3
     # via
-    #   bleach
     #   deprecation
 pandas==1.5.3
     # via apache-superset
@@ -252,7 +251,6 @@ simplejson==3.17.3
     # via apache-superset
 six==1.16.0
     # via
-    #   bleach
     #   click-repl
     #   isodate
     #   jsonschema
@@ -297,8 +295,6 @@ vine==5.0.0
     #   kombu
 wcwidth==0.2.5
     # via prompt-toolkit
-webencodings==0.5.1
-    # via bleach
 werkzeug==2.3.3
     # via
     #   apache-superset
diff --git a/setup.cfg b/setup.cfg
index a9470d51bd..970fd4ca82 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -30,7 +30,7 @@ combine_as_imports = true
 include_trailing_comma = true
 line_length = 88
 known_first_party = superset
-known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,cron_descriptor,croniter,cryptography,dateutil,deprecation,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_jwt_extended,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,freezegun,geohash,geopy,graphlib,holidays,humanize,isodate,jinja2,jwt,markdown,markupsafe,marshmallow,marshmallow_enum,msgpack,numpy,pandas,parameterized,parsedatetime,pgsanity,pkg_resour [...]
+known_third_party =alembic,apispec,backoff,cachelib,celery,click,colorama,cron_descriptor,croniter,cryptography,dateutil,deprecation,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_jwt_extended,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,freezegun,geohash,geopy,graphlib,holidays,humanize,isodate,jinja2,jwt,markdown,markupsafe,marshmallow,marshmallow_enum,msgpack,nh3,numpy,pandas,parameterized,parsedatetime,pgsanity,pkg_resources [...]
 multi_line_output = 3
 order_by_type = false
 
diff --git a/setup.py b/setup.py
index d7d028ba93..66314ed9c4 100644
--- a/setup.py
+++ b/setup.py
@@ -73,7 +73,6 @@ setup(
     },
     install_requires=[
         "backoff>=1.8.0",
-        "bleach>=3.0.2, <4.0.0",
         "cachelib>=0.4.1,<0.5",
         "celery>=5.2.2, <6.0.0",
         "click>=8.0.3",
@@ -100,6 +99,7 @@ setup(
         "isodate",
         "markdown>=3.0",
         "msgpack>=1.0.0, <1.1",
+        "nh3>=0.2.11, <0.3",
         "numpy==1.23.5",
         "pandas>=1.5.3, <1.6",
         "parsedatetime",
diff --git a/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx b/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx
index 89d4fb94ce..cf06a1d7ed 100644
--- a/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx
+++ b/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx
@@ -486,10 +486,10 @@ class Chart extends React.Component {
 
         {/*
           This usage of dangerouslySetInnerHTML is safe since it is being used to render
-          markdown that is sanitized with bleach. See:
+          markdown that is sanitized with nh3. See:
              https://github.com/apache/superset/pull/4390
           and
-             https://github.com/apache/superset/commit/b6fcc22d5a2cb7a5e92599ed5795a0169385a825
+             https://github.com/apache/superset/pull/23862
         */}
         {isExpanded && slice.description_markeddown && (
           <div
diff --git a/superset/reports/notifications/email.py b/superset/reports/notifications/email.py
index 22b1714f99..10a76e7573 100644
--- a/superset/reports/notifications/email.py
+++ b/superset/reports/notifications/email.py
@@ -22,7 +22,7 @@ from dataclasses import dataclass
 from email.utils import make_msgid, parseaddr
 from typing import Any, Dict, Optional
 
-import bleach
+import nh3
 from flask_babel import gettext as __
 
 from superset import app
@@ -35,10 +35,10 @@ from superset.utils.decorators import statsd_gauge
 
 logger = logging.getLogger(__name__)
 
-TABLE_TAGS = ["table", "th", "tr", "td", "thead", "tbody", "tfoot"]
-TABLE_ATTRIBUTES = ["colspan", "rowspan", "halign", "border", "class"]
+TABLE_TAGS = {"table", "th", "tr", "td", "thead", "tbody", "tfoot"}
+TABLE_ATTRIBUTES = {"colspan", "rowspan", "halign", "border", "class"}
 
-ALLOWED_TAGS = [
+ALLOWED_TAGS = {
     "a",
     "abbr",
     "acronym",
@@ -54,13 +54,14 @@ ALLOWED_TAGS = [
     "p",
     "strong",
     "ul",
-] + TABLE_TAGS
+}.union(TABLE_TAGS)
 
+ALLOWED_TABLE_ATTRIBUTES = {tag: TABLE_ATTRIBUTES for tag in TABLE_TAGS}
 ALLOWED_ATTRIBUTES = {
-    "a": ["href", "title"],
-    "abbr": ["title"],
-    "acronym": ["title"],
-    **{tag: TABLE_ATTRIBUTES for tag in TABLE_TAGS},
+    "a": {"href", "title"},
+    "abbr": {"title"},
+    "acronym": {"title"},
+    **ALLOWED_TABLE_ATTRIBUTES,
 }
 
 
@@ -108,7 +109,8 @@ class EmailNotification(BaseNotification):  # pylint: disable=too-few-public-met
             }
 
         # Strip any malicious HTML from the description
-        description = bleach.clean(
+        # pylint: disable=no-member
+        description = nh3.clean(
             self._content.description or "",
             tags=ALLOWED_TAGS,
             attributes=ALLOWED_ATTRIBUTES,
@@ -117,12 +119,13 @@ class EmailNotification(BaseNotification):  # pylint: disable=too-few-public-met
         # Strip malicious HTML from embedded data, allowing only table elements
         if self._content.embedded_data is not None:
             df = self._content.embedded_data
-            html_table = bleach.clean(
+            # pylint: disable=no-member
+            html_table = nh3.clean(
                 df.to_html(na_rep="", index=True, escape=True),
                 # pandas will escape the HTML in cells already, so passing
                 # more allowed tags here will not work
                 tags=TABLE_TAGS,
-                attributes=TABLE_ATTRIBUTES,
+                attributes=ALLOWED_TABLE_ATTRIBUTES,
             )
         else:
             html_table = ""
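
Beyond the import swap, the mechanical change above is that nh3 takes sets for tags and a dict of sets for attributes, where bleach took lists. A minimal sketch of the call as now used; the expected sanitized output is shown in the comment:

    import nh3

    dirty = '<a href="#" onclick="alert(1)">a test</a><script>bad()</script>'
    clean = nh3.clean(
        dirty,
        tags={"a"},
        attributes={"a": {"href", "title"}},
    )
    print(clean)  # <a href="#" rel="noopener noreferrer">a test</a>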
diff --git a/superset/utils/async_query_manager.py b/superset/utils/async_query_manager.py
index 71559aaa3d..b6c608948d 100644
--- a/superset/utils/async_query_manager.py
+++ b/superset/utils/async_query_manager.py
@@ -192,5 +192,5 @@ class AsyncQueryManager:
         logger.debug("********** logging event data to stream %s", scoped_stream_name)
         logger.debug(event_data)
 
-        self._redis.xadd(scoped_stream_name, event_data, "*", self._stream_limit)
-        self._redis.xadd(full_stream_name, event_data, "*", self._stream_limit_firehose)
+        self._redis.xadd(scoped_stream_name, event_data, "*", self._stream_limit)  # type: ignore
+        self._redis.xadd(full_stream_name, event_data, "*", self._stream_limit_firehose)  # type: ignore
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 109d6742b1..84a4bde9b3 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -70,8 +70,8 @@ from typing import (
 from urllib.parse import unquote_plus
 from zipfile import ZipFile
 
-import bleach
 import markdown as md
+import nh3
 import numpy as np
 import pandas as pd
 import sqlalchemy as sa
@@ -664,7 +664,7 @@ def error_msg_from_exception(ex: Exception) -> str:
 
 
 def markdown(raw: str, markup_wrap: Optional[bool] = False) -> str:
-    safe_markdown_tags = [
+    safe_markdown_tags = {
         "h1",
         "h2",
         "h3",
@@ -690,10 +690,10 @@ def markdown(raw: str, markup_wrap: Optional[bool] = False) -> str:
         "dt",
         "img",
         "a",
-    ]
+    }
     safe_markdown_attrs = {
-        "img": ["src", "alt", "title"],
-        "a": ["href", "alt", "title"],
+        "img": {"src", "alt", "title"},
+        "a": {"href", "alt", "title"},
     }
     safe = md.markdown(
         raw or "",
@@ -703,7 +703,8 @@ def markdown(raw: str, markup_wrap: Optional[bool] = False) -> str:
             "markdown.extensions.codehilite",
         ],
     )
-    safe = bleach.clean(safe, safe_markdown_tags, safe_markdown_attrs)
+    # pylint: disable=no-member
+    safe = nh3.clean(safe, tags=safe_markdown_tags, attributes=safe_markdown_attrs)
     if markup_wrap:
         safe = Markup(safe)
     return safe
diff --git a/tests/unit_tests/notifications/email_tests.py b/tests/unit_tests/notifications/email_tests.py
index 4ce34b99ca..697a9bac40 100644
--- a/tests/unit_tests/notifications/email_tests.py
+++ b/tests/unit_tests/notifications/email_tests.py
@@ -50,5 +50,8 @@ def test_render_description_with_html() -> None:
         ._get_content()
         .body
     )
-    assert '<p>This is <a href="#">a test</a> alert</p><br>' in email_body
+    assert (
+        '<p>This is <a href="#" rel="noopener noreferrer">a test</a> alert</p><br>'
+        in email_body
+    )
     assert '<td>&lt;a href="http://www.example.com"&gt;333&lt;/a&gt;</td>' in email_body
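
The rel="noopener noreferrer" expected above is nh3's doing: unlike bleach, nh3 decorates links with a rel attribute by default. Should that default ever need changing, the knob is nh3's link_rel parameter, for example:

    import nh3

    html = '<a href="#">a test</a>'
    print(nh3.clean(html, tags={"a"}, attributes={"a": {"href"}}))
    # <a href="#" rel="noopener noreferrer">a test</a>
    print(nh3.clean(html, tags={"a"}, attributes={"a": {"href"}}, link_rel=None))
    # <a href="#">a test</a>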


(superset) 03/08: fix: improve upload ZIP file validation (#25658)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 7c23cb0b3fd224c320b35f05e74b572033569154
Author: Daniel Vaz Gaspar <da...@gmail.com>
AuthorDate: Tue Oct 17 18:28:09 2023 +0100

    fix: improve upload ZIP file validation (#25658)
---
 superset/commands/importers/v1/utils.py |   2 +
 superset/config.py                      |   5 +
 superset/utils/core.py                  |  19 ++++
 tests/unit_tests/utils/test_core.py     | 179 +++++++++++++++++++++++++++++++-
 4 files changed, 203 insertions(+), 2 deletions(-)

diff --git a/superset/commands/importers/v1/utils.py b/superset/commands/importers/v1/utils.py
index 1e73886682..3df5fbf821 100644
--- a/superset/commands/importers/v1/utils.py
+++ b/superset/commands/importers/v1/utils.py
@@ -25,6 +25,7 @@ from marshmallow.exceptions import ValidationError
 from superset import db
 from superset.commands.importers.exceptions import IncorrectVersionError
 from superset.models.core import Database
+from superset.utils.core import check_is_safe_zip
 
 METADATA_FILE_NAME = "metadata.yaml"
 IMPORT_VERSION = "1.0.0"
@@ -147,6 +148,7 @@ def is_valid_config(file_name: str) -> bool:
 
 
 def get_contents_from_bundle(bundle: ZipFile) -> Dict[str, str]:
+    check_is_safe_zip(bundle)
     return {
         remove_root(file_name): bundle.read(file_name).decode()
         for file_name in bundle.namelist()
diff --git a/superset/config.py b/superset/config.py
index c089416505..f2d9fa5adf 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -1530,6 +1530,11 @@ WELCOME_PAGE_LAST_TAB: Union[
     Literal["examples", "all"], Tuple[str, List[Dict[str, Any]]]
 ] = "all"
 
+# Max allowed size for a zipped file
+ZIPPED_FILE_MAX_SIZE = 100 * 1024 * 1024  # 100MB
+# Max allowed compression ratio for a zipped file
+ZIP_FILE_MAX_COMPRESS_RATIO = 200.0
+
 # Configuration for environment tag shown on the navbar. Setting 'text' to '' will hide the tag.
 # 'color' can either be a hex color code, or a dot-indexed theme color (e.g. error.base)
 ENVIRONMENT_TAG_CONFIG = {
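
Both limits live in superset/config.py, so deployments can tune them the usual way through superset_config.py; a sketch with illustrative values:

    # superset_config.py
    ZIPPED_FILE_MAX_SIZE = 50 * 1024 * 1024  # lower the per-member cap to 50MB
    ZIP_FILE_MAX_COMPRESS_RATIO = 100.0      # reject archives compressed beyond 100:1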
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 8cf73b84aa..517ca6e21b 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -1998,6 +1998,25 @@ def create_zip(files: Dict[str, Any]) -> BytesIO:
     return buf
 
 
+def check_is_safe_zip(zip_file: ZipFile) -> None:
+    """
+    Checks whether a ZIP file is safe, raises SupersetException if not.
+
+    :param zip_file:
+    :return:
+    """
+    uncompress_size = 0
+    compress_size = 0
+    for zip_file_element in zip_file.infolist():
+        if zip_file_element.file_size > current_app.config["ZIPPED_FILE_MAX_SIZE"]:
+            raise SupersetException("Found file with size above allowed threshold")
+        uncompress_size += zip_file_element.file_size
+        compress_size += zip_file_element.compress_size
+    compress_ratio = uncompress_size / compress_size
+    if compress_ratio > current_app.config["ZIP_FILE_MAX_COMPRESS_RATIO"]:
+        raise SupersetException("Zip compress ratio above allowed threshold")
+
+
 def remove_extra_adhoc_filters(form_data: Dict[str, Any]) -> None:
     """
     Remove filters from slice data that originate from a filter box or native filter
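
The ratio check sums sizes across all members, so an archive full of small but extremely compressible files is caught even when no single member trips the size limit (note the division assumes a non-zero total compress_size). Worked numbers matching the tests below:

    # five members, each 1000 bytes uncompressed but only 1 byte compressed
    uncompress_size = 5 * 1000
    compress_size = 5 * 1
    ratio = uncompress_size / compress_size
    print(ratio)          # 1000.0
    print(ratio > 200.0)  # True -> SupersetException, ratio above threshold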
diff --git a/tests/unit_tests/utils/test_core.py b/tests/unit_tests/utils/test_core.py
index 6845bb2fc1..d95b302c4f 100644
--- a/tests/unit_tests/utils/test_core.py
+++ b/tests/unit_tests/utils/test_core.py
@@ -15,11 +15,25 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import Any, Dict
+import os
+from dataclasses import dataclass
+from typing import Any, Dict, Optional
+from unittest.mock import MagicMock
 
+import pandas as pd
 import pytest
 
-from superset.utils.core import QueryObjectFilterClause, remove_extra_adhoc_filters
+from superset.exceptions import SupersetException
+from superset.utils.core import (
+    cast_to_boolean,
+    check_is_safe_zip,
+    DateColumn,
+    is_test,
+    normalize_dttm_col,
+    parse_boolean_string,
+    QueryObjectFilterClause,
+    remove_extra_adhoc_filters,
+)
 
 ADHOC_FILTER: QueryObjectFilterClause = {
     "col": "foo",
@@ -35,6 +49,12 @@ EXTRA_FILTER: QueryObjectFilterClause = {
 }
 
 
+@dataclass
+class MockZipInfo:
+    file_size: int
+    compress_size: int
+
+
 @pytest.mark.parametrize(
     "original,expected",
     [
@@ -84,3 +104,158 @@ def test_remove_extra_adhoc_filters(
 ) -> None:
     remove_extra_adhoc_filters(original)
     assert expected == original
+
+
+def test_is_test() -> None:
+    orig_value = os.getenv("SUPERSET_TESTENV")
+
+    os.environ["SUPERSET_TESTENV"] = "true"
+    assert is_test()
+    os.environ["SUPERSET_TESTENV"] = "false"
+    assert not is_test()
+    os.environ["SUPERSET_TESTENV"] = ""
+    assert not is_test()
+
+    if orig_value is not None:
+        os.environ["SUPERSET_TESTENV"] = orig_value
+
+
+@pytest.mark.parametrize(
+    "test_input,expected",
+    [
+        ("y", True),
+        ("Y", True),
+        ("yes", True),
+        ("True", True),
+        ("t", True),
+        ("true", True),
+        ("On", True),
+        ("on", True),
+        ("1", True),
+        ("n", False),
+        ("N", False),
+        ("no", False),
+        ("False", False),
+        ("f", False),
+        ("false", False),
+        ("Off", False),
+        ("off", False),
+        ("0", False),
+        ("foo", False),
+        (None, False),
+    ],
+)
+def test_parse_boolean_string(test_input: Optional[str], expected: bool) -> None:
+    assert parse_boolean_string(test_input) == expected
+
+
+def test_int_values() -> None:
+    assert cast_to_boolean(1) is True
+    assert cast_to_boolean(0) is False
+    assert cast_to_boolean(-1) is True
+    assert cast_to_boolean(42) is True
+    assert cast_to_boolean(0) is False
+
+
+def test_float_values() -> None:
+    assert cast_to_boolean(0.5) is True
+    assert cast_to_boolean(3.14) is True
+    assert cast_to_boolean(-2.71) is True
+    assert cast_to_boolean(0.0) is False
+
+
+def test_string_values() -> None:
+    assert cast_to_boolean("true") is True
+    assert cast_to_boolean("TruE") is True
+    assert cast_to_boolean("false") is False
+    assert cast_to_boolean("FaLsE") is False
+    assert cast_to_boolean("") is False
+
+
+def test_none_value() -> None:
+    assert cast_to_boolean(None) is None
+
+
+def test_boolean_values() -> None:
+    assert cast_to_boolean(True) is True
+    assert cast_to_boolean(False) is False
+
+
+def test_other_values() -> None:
+    assert cast_to_boolean([]) is False
+    assert cast_to_boolean({}) is False
+    assert cast_to_boolean(object()) is False
+
+
+def test_normalize_dttm_col() -> None:
+    """
+    Tests for the ``normalize_dttm_col`` function.
+
+    In particular, this covers a regression when Pandas was upgraded from 1.5.3 to
+    2.0.3 and the behavior of ``pd.to_datetime`` changed.
+    """
+    df = pd.DataFrame({"__time": ["2017-07-01T00:00:00.000Z"]})
+    assert (
+        df.to_markdown()
+        == """
+|    | __time                   |
+|---:|:-------------------------|
+|  0 | 2017-07-01T00:00:00.000Z |
+    """.strip()
+    )
+
+    # in 1.5.3 this would return a datetime64[ns] dtype, but in 2.0.3 we had to
+    # add ``exact=False`` since there is a leftover after parsing the format
+    dttm_cols = (DateColumn("__time", "%Y-%m-%d"),)
+
+    # the function modifies the dataframe in place
+    normalize_dttm_col(df, dttm_cols)
+
+    assert df["__time"].astype(str).tolist() == ["2017-07-01"]
+
+
+def test_check_if_safe_zip_success(app_context: None) -> None:
+    """
+    Test if ZIP files are safe
+    """
+    ZipFile = MagicMock()
+    ZipFile.infolist.return_value = [
+        MockZipInfo(file_size=1000, compress_size=10),
+        MockZipInfo(file_size=1000, compress_size=10),
+        MockZipInfo(file_size=1000, compress_size=10),
+        MockZipInfo(file_size=1000, compress_size=10),
+        MockZipInfo(file_size=1000, compress_size=10),
+    ]
+    check_is_safe_zip(ZipFile)
+
+
+def test_check_if_safe_zip_high_rate(app_context: None) -> None:
+    """
+    Test that a ZIP file is not too highly compressed
+    """
+    ZipFile = MagicMock()
+    ZipFile.infolist.return_value = [
+        MockZipInfo(file_size=1000, compress_size=1),
+        MockZipInfo(file_size=1000, compress_size=1),
+        MockZipInfo(file_size=1000, compress_size=1),
+        MockZipInfo(file_size=1000, compress_size=1),
+        MockZipInfo(file_size=1000, compress_size=1),
+    ]
+    with pytest.raises(SupersetException):
+        check_is_safe_zip(ZipFile)
+
+
+def test_check_if_safe_zip_hidden_bomb(app_context: None) -> None:
+    """
+    Test that a ZIP file does not hide a single large, highly compressed file
+    """
+    ZipFile = MagicMock()
+    ZipFile.infolist.return_value = [
+        MockZipInfo(file_size=1000, compress_size=100),
+        MockZipInfo(file_size=1000, compress_size=100),
+        MockZipInfo(file_size=1000, compress_size=100),
+        MockZipInfo(file_size=1000, compress_size=100),
+        MockZipInfo(file_size=1000 * (1024 * 1024), compress_size=100),
+    ]
+    with pytest.raises(SupersetException):
+        check_is_safe_zip(ZipFile)