You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@superset.apache.org by jo...@apache.org on 2023/07/06 16:21:17 UTC

[superset] branch master updated: chore(dao): Condense delete/bulk-delete operations (#24466)

This is an automated email from the ASF dual-hosted git repository.

johnbodley pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 7409289762 chore(dao): Condense delete/bulk-delete operations (#24466)
7409289762 is described below

commit 7409289762056f53faf424b80ef4146db8191487
Author: John Bodley <45...@users.noreply.github.com>
AuthorDate: Thu Jul 6 09:21:08 2023 -0700

    chore(dao): Condense delete/bulk-delete operations (#24466)
    
    Co-authored-by: Michael S. Molina <70...@users.noreply.github.com>
---
 .../annotations/commands/bulk_delete.py            |  5 +-
 .../annotations/commands/delete.py                 |  7 +--
 superset/annotation_layers/commands/bulk_delete.py |  5 +-
 superset/annotation_layers/commands/delete.py      |  7 +--
 superset/charts/commands/bulk_delete.py            |  4 +-
 superset/charts/commands/delete.py                 |  6 +-
 superset/css_templates/commands/bulk_delete.py     |  5 +-
 superset/daos/annotation.py                        | 31 ----------
 superset/daos/base.py                              | 50 +++++++++-------
 superset/daos/chart.py                             | 21 +++----
 superset/daos/css.py                               | 23 +-------
 superset/daos/dashboard.py                         | 37 ++++++------
 superset/daos/dataset.py                           | 67 ++++++++++++----------
 superset/daos/query.py                             | 18 +-----
 superset/daos/report.py                            | 38 ++++++------
 superset/dashboards/api.py                         |  2 +-
 superset/dashboards/commands/bulk_delete.py        |  5 +-
 superset/dashboards/commands/delete.py             |  6 +-
 superset/dashboards/filter_sets/api.py             |  4 +-
 superset/dashboards/filter_sets/commands/delete.py |  6 +-
 superset/databases/commands/delete.py              |  6 +-
 superset/databases/ssh_tunnel/commands/delete.py   |  7 +--
 superset/datasets/columns/commands/delete.py       | 11 ++--
 superset/datasets/commands/bulk_delete.py          |  2 +-
 superset/datasets/commands/delete.py               |  4 +-
 superset/datasets/metrics/commands/delete.py       | 11 ++--
 .../queries/saved_queries/commands/bulk_delete.py  |  5 +-
 superset/reports/commands/bulk_delete.py           |  5 +-
 superset/reports/commands/delete.py                |  7 +--
 .../row_level_security/commands/bulk_delete.py     |  4 +-
 tests/integration_tests/datasets/api_tests.py      |  4 +-
 31 files changed, 171 insertions(+), 242 deletions(-)

diff --git a/superset/annotation_layers/annotations/commands/bulk_delete.py b/superset/annotation_layers/annotations/commands/bulk_delete.py
index 2e0c53808f..153f93aa6f 100644
--- a/superset/annotation_layers/annotations/commands/bulk_delete.py
+++ b/superset/annotation_layers/annotations/commands/bulk_delete.py
@@ -36,9 +36,10 @@ class BulkDeleteAnnotationCommand(BaseCommand):
 
     def run(self) -> None:
         self.validate()
+        assert self._models
+
         try:
-            AnnotationDAO.bulk_delete(self._models)
-            return None
+            AnnotationDAO.delete(self._models)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise AnnotationBulkDeleteFailedError() from ex
diff --git a/superset/annotation_layers/annotations/commands/delete.py b/superset/annotation_layers/annotations/commands/delete.py
index 2af01f57f4..bbd7f39dd6 100644
--- a/superset/annotation_layers/annotations/commands/delete.py
+++ b/superset/annotation_layers/annotations/commands/delete.py
@@ -17,8 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
-
 from superset.annotation_layers.annotations.commands.exceptions import (
     AnnotationDeleteFailedError,
     AnnotationNotFoundError,
@@ -36,16 +34,15 @@ class DeleteAnnotationCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[Annotation] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
         assert self._model
 
         try:
-            annotation = AnnotationDAO.delete(self._model)
+            AnnotationDAO.delete(self._model)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise AnnotationDeleteFailedError() from ex
-        return annotation
 
     def validate(self) -> None:
         # Validate/populate model exists
diff --git a/superset/annotation_layers/commands/bulk_delete.py b/superset/annotation_layers/commands/bulk_delete.py
index e4696065a6..a227fc3266 100644
--- a/superset/annotation_layers/commands/bulk_delete.py
+++ b/superset/annotation_layers/commands/bulk_delete.py
@@ -37,9 +37,10 @@ class BulkDeleteAnnotationLayerCommand(BaseCommand):
 
     def run(self) -> None:
         self.validate()
+        assert self._models
+
         try:
-            AnnotationLayerDAO.bulk_delete(self._models)
-            return None
+            AnnotationLayerDAO.delete(self._models)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise AnnotationLayerBulkDeleteFailedError() from ex
diff --git a/superset/annotation_layers/commands/delete.py b/superset/annotation_layers/commands/delete.py
index 1af4242dce..14f53272f1 100644
--- a/superset/annotation_layers/commands/delete.py
+++ b/superset/annotation_layers/commands/delete.py
@@ -17,8 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
-
 from superset.annotation_layers.commands.exceptions import (
     AnnotationLayerDeleteFailedError,
     AnnotationLayerDeleteIntegrityError,
@@ -37,16 +35,15 @@ class DeleteAnnotationLayerCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[AnnotationLayer] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
         assert self._model
 
         try:
-            annotation_layer = AnnotationLayerDAO.delete(self._model)
+            AnnotationLayerDAO.delete(self._model)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise AnnotationLayerDeleteFailedError() from ex
-        return annotation_layer
 
     def validate(self) -> None:
         # Validate/populate model exists
diff --git a/superset/charts/commands/bulk_delete.py b/superset/charts/commands/bulk_delete.py
index 964c40d812..0555992e24 100644
--- a/superset/charts/commands/bulk_delete.py
+++ b/superset/charts/commands/bulk_delete.py
@@ -43,8 +43,10 @@ class BulkDeleteChartCommand(BaseCommand):
 
     def run(self) -> None:
         self.validate()
+        assert self._models
+
         try:
-            ChartDAO.bulk_delete(self._models)
+            ChartDAO.delete(self._models)
         except DeleteFailedError as ex:
             logger.exception(ex.exception)
             raise ChartBulkDeleteFailedError() from ex
diff --git a/superset/charts/commands/delete.py b/superset/charts/commands/delete.py
index 184e9f8e1a..6bbceb576f 100644
--- a/superset/charts/commands/delete.py
+++ b/superset/charts/commands/delete.py
@@ -17,7 +17,6 @@
 import logging
 from typing import cast, Optional
 
-from flask_appbuilder.models.sqla import Model
 from flask_babel import lazy_gettext as _
 
 from superset import security_manager
@@ -43,7 +42,7 @@ class DeleteChartCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[Slice] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
         self._model = cast(Slice, self._model)
         try:
@@ -52,11 +51,10 @@ class DeleteChartCommand(BaseCommand):
             # table, sporadically Superset will error because the rows are not deleted.
             # Let's do it manually here to prevent the error.
             self._model.owners = []
-            chart = ChartDAO.delete(self._model)
+            ChartDAO.delete(self._model)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise ChartDeleteFailedError() from ex
-        return chart
 
     def validate(self) -> None:
         # Validate/populate model exists
diff --git a/superset/css_templates/commands/bulk_delete.py b/superset/css_templates/commands/bulk_delete.py
index c676e9eed8..ef13a1c8e2 100644
--- a/superset/css_templates/commands/bulk_delete.py
+++ b/superset/css_templates/commands/bulk_delete.py
@@ -36,9 +36,10 @@ class BulkDeleteCssTemplateCommand(BaseCommand):
 
     def run(self) -> None:
         self.validate()
+        assert self._models
+
         try:
-            CssTemplateDAO.bulk_delete(self._models)
-            return None
+            CssTemplateDAO.delete(self._models)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise CssTemplateBulkDeleteFailedError() from ex
diff --git a/superset/daos/annotation.py b/superset/daos/annotation.py
index 2df336647a..1dd992241f 100644
--- a/superset/daos/annotation.py
+++ b/superset/daos/annotation.py
@@ -17,10 +17,7 @@
 import logging
 from typing import Optional, Union
 
-from sqlalchemy.exc import SQLAlchemyError
-
 from superset.daos.base import BaseDAO
-from superset.daos.exceptions import DAODeleteFailedError
 from superset.extensions import db
 from superset.models.annotations import Annotation, AnnotationLayer
 
@@ -28,19 +25,6 @@ logger = logging.getLogger(__name__)
 
 
 class AnnotationDAO(BaseDAO[Annotation]):
-    @staticmethod
-    def bulk_delete(models: Optional[list[Annotation]], commit: bool = True) -> None:
-        item_ids = [model.id for model in models] if models else []
-        try:
-            db.session.query(Annotation).filter(Annotation.id.in_(item_ids)).delete(
-                synchronize_session="fetch"
-            )
-            if commit:
-                db.session.commit()
-        except SQLAlchemyError as ex:
-            db.session.rollback()
-            raise DAODeleteFailedError() from ex
-
     @staticmethod
     def validate_update_uniqueness(
         layer_id: int, short_descr: str, annotation_id: Optional[int] = None
@@ -63,21 +47,6 @@ class AnnotationDAO(BaseDAO[Annotation]):
 
 
 class AnnotationLayerDAO(BaseDAO[AnnotationLayer]):
-    @staticmethod
-    def bulk_delete(
-        models: Optional[list[AnnotationLayer]], commit: bool = True
-    ) -> None:
-        item_ids = [model.id for model in models] if models else []
-        try:
-            db.session.query(AnnotationLayer).filter(
-                AnnotationLayer.id.in_(item_ids)
-            ).delete(synchronize_session="fetch")
-            if commit:
-                db.session.commit()
-        except SQLAlchemyError as ex:
-            db.session.rollback()
-            raise DAODeleteFailedError() from ex
-
     @staticmethod
     def has_annotations(model_id: Union[int, list[int]]) -> bool:
         if isinstance(model_id, list):
diff --git a/superset/daos/base.py b/superset/daos/base.py
index c0758f51dd..a92e4813b3 100644
--- a/superset/daos/base.py
+++ b/superset/daos/base.py
@@ -14,7 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import Any, Generic, get_args, Optional, TypeVar, Union
+from __future__ import annotations
+
+from typing import Any, Generic, get_args, TypeVar
 
 from flask_appbuilder.models.filters import BaseFilter
 from flask_appbuilder.models.sqla import Model
@@ -29,6 +31,7 @@ from superset.daos.exceptions import (
     DAOUpdateFailedError,
 )
 from superset.extensions import db
+from superset.utils.core import get_iterable
 
 T = TypeVar("T", bound=Model)  # pylint: disable=invalid-name
 
@@ -38,12 +41,12 @@ class BaseDAO(Generic[T]):
     Base DAO, implement base CRUD sqlalchemy operations
     """
 
-    model_cls: Optional[type[Model]] = None
+    model_cls: type[Model] | None = None
     """
     Child classes need to state the Model class so they don't need to implement basic
     create, update and delete methods
     """
-    base_filter: Optional[BaseFilter] = None
+    base_filter: BaseFilter | None = None
     """
     Child classes can register base filtering to be applied to all filter methods
     """
@@ -57,10 +60,10 @@ class BaseDAO(Generic[T]):
     @classmethod
     def find_by_id(
         cls,
-        model_id: Union[str, int],
+        model_id: str | int,
         session: Session = None,
         skip_base_filter: bool = False,
-    ) -> Optional[Model]:
+    ) -> Model | None:
         """
         Find a model by id, if defined applies `base_filter`
         """
@@ -81,7 +84,7 @@ class BaseDAO(Generic[T]):
     @classmethod
     def find_by_ids(
         cls,
-        model_ids: Union[list[str], list[int]],
+        model_ids: list[str] | list[int],
         session: Session = None,
         skip_base_filter: bool = False,
     ) -> list[T]:
@@ -114,7 +117,7 @@ class BaseDAO(Generic[T]):
         return query.all()
 
     @classmethod
-    def find_one_or_none(cls, **filter_by: Any) -> Optional[T]:
+    def find_one_or_none(cls, **filter_by: Any) -> T | None:
         """
         Get the first that fit the `base_filter`
         """
@@ -180,25 +183,28 @@ class BaseDAO(Generic[T]):
         return model
 
     @classmethod
-    def delete(cls, model: T, commit: bool = True) -> T:
+    def delete(cls, items: T | list[T], commit: bool = True) -> None:
         """
-        Generic delete a model
-        :raises: DAODeleteFailedError
+        Delete the specified item(s) including their associated relationships.
+
+        Note that bulk deletion via `delete` is not invoked in the base class as this
+        does not dispatch the ORM `after_delete` event which may be required to augment
+        additional records loosely defined via implicit relationships. Instead ORM
+        objects are deleted one-by-one via `Session.delete`.
+
+        Subclasses may invoke bulk deletion but are responsible for instrumenting any
+        post-deletion logic.
+
+        :param items: The item(s) to delete
+        :param commit: Whether to commit the transaction
+        :raises DAODeleteFailedError: If the deletion failed
+        :see: https://docs.sqlalchemy.org/en/latest/orm/queryguide/dml.html
         """
-        try:
-            db.session.delete(model)
-            if commit:
-                db.session.commit()
-        except SQLAlchemyError as ex:  # pragma: no cover
-            db.session.rollback()
-            raise DAODeleteFailedError(exception=ex) from ex
-        return model
 
-    @classmethod
-    def bulk_delete(cls, models: list[T], commit: bool = True) -> None:
         try:
-            for model in models:
-                cls.delete(model, False)
+            for item in get_iterable(items):
+                db.session.delete(item)
+
             if commit:
                 db.session.commit()
         except SQLAlchemyError as ex:
diff --git a/superset/daos/chart.py b/superset/daos/chart.py
index 1a13965022..6fb257f45a 100644
--- a/superset/daos/chart.py
+++ b/superset/daos/chart.py
@@ -15,9 +15,11 @@
 # specific language governing permissions and limitations
 # under the License.
 # pylint: disable=arguments-renamed
+from __future__ import annotations
+
 import logging
 from datetime import datetime
-from typing import Optional, TYPE_CHECKING
+from typing import TYPE_CHECKING
 
 from sqlalchemy.exc import SQLAlchemyError
 
@@ -26,7 +28,7 @@ from superset.daos.base import BaseDAO
 from superset.extensions import db
 from superset.models.core import FavStar, FavStarClassName
 from superset.models.slice import Slice
-from superset.utils.core import get_user_id
+from superset.utils.core import get_iterable, get_user_id
 
 if TYPE_CHECKING:
     from superset.connectors.base.models import BaseDatasource
@@ -37,15 +39,14 @@ logger = logging.getLogger(__name__)
 class ChartDAO(BaseDAO[Slice]):
     base_filter = ChartFilter
 
-    @staticmethod
-    def bulk_delete(models: Optional[list[Slice]], commit: bool = True) -> None:
-        item_ids = [model.id for model in models] if models else []
+    @classmethod
+    def delete(cls, items: Slice | list[Slice], commit: bool = True) -> None:
+        item_ids = [item.id for item in get_iterable(items)]
         # bulk delete, first delete related data
-        if models:
-            for model in models:
-                model.owners = []
-                model.dashboards = []
-                db.session.merge(model)
+        for item in get_iterable(items):
+            item.owners = []
+            item.dashboards = []
+            db.session.merge(item)
         # bulk delete itself
         try:
             db.session.query(Slice).filter(Slice.id.in_(item_ids)).delete(
diff --git a/superset/daos/css.py b/superset/daos/css.py
index 3a1cbe8fda..df52de37e3 100644
--- a/superset/daos/css.py
+++ b/superset/daos/css.py
@@ -14,30 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-import logging
-from typing import Optional
-
-from sqlalchemy.exc import SQLAlchemyError
-
 from superset.daos.base import BaseDAO
-from superset.daos.exceptions import DAODeleteFailedError
-from superset.extensions import db
 from superset.models.core import CssTemplate
 
-logger = logging.getLogger(__name__)
-
 
 class CssTemplateDAO(BaseDAO[CssTemplate]):
-    @staticmethod
-    def bulk_delete(models: Optional[list[CssTemplate]], commit: bool = True) -> None:
-        item_ids = [model.id for model in models] if models else []
-        try:
-            db.session.query(CssTemplate).filter(CssTemplate.id.in_(item_ids)).delete(
-                synchronize_session="fetch"
-            )
-            if commit:
-                db.session.commit()
-        except SQLAlchemyError as ex:
-            if commit:
-                db.session.rollback()
-            raise DAODeleteFailedError() from ex
+    pass
diff --git a/superset/daos/dashboard.py b/superset/daos/dashboard.py
index 1650711d50..baeac95f2f 100644
--- a/superset/daos/dashboard.py
+++ b/superset/daos/dashboard.py
@@ -14,10 +14,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import json
 import logging
 from datetime import datetime
-from typing import Any, Optional, Union
+from typing import Any
 
 from flask import g
 from flask_appbuilder.models.sqla import Model
@@ -43,7 +45,7 @@ from superset.models.dashboard import Dashboard, id_or_slug_filter
 from superset.models.embedded_dashboard import EmbeddedDashboard
 from superset.models.filter_set import FilterSet
 from superset.models.slice import Slice
-from superset.utils.core import get_user_id
+from superset.utils.core import get_iterable, get_user_id
 from superset.utils.dashboard_filter_scopes_converter import copy_filter_scopes
 
 logger = logging.getLogger(__name__)
@@ -53,7 +55,7 @@ class DashboardDAO(BaseDAO[Dashboard]):
     base_filter = DashboardAccessFilter
 
     @classmethod
-    def get_by_id_or_slug(cls, id_or_slug: Union[int, str]) -> Dashboard:
+    def get_by_id_or_slug(cls, id_or_slug: int | str) -> Dashboard:
         if is_uuid(id_or_slug):
             # just get dashboard if it's uuid
             dashboard = Dashboard.get(id_or_slug)
@@ -88,9 +90,7 @@ class DashboardDAO(BaseDAO[Dashboard]):
         return DashboardDAO.get_by_id_or_slug(id_or_slug).slices
 
     @staticmethod
-    def get_dashboard_changed_on(
-        id_or_slug_or_dashboard: Union[str, Dashboard]
-    ) -> datetime:
+    def get_dashboard_changed_on(id_or_slug_or_dashboard: str | Dashboard) -> datetime:
         """
         Get latest changed datetime for a dashboard.
 
@@ -108,7 +108,7 @@ class DashboardDAO(BaseDAO[Dashboard]):
 
     @staticmethod
     def get_dashboard_and_slices_changed_on(  # pylint: disable=invalid-name
-        id_or_slug_or_dashboard: Union[str, Dashboard]
+        id_or_slug_or_dashboard: str | Dashboard,
     ) -> datetime:
         """
         Get latest changed datetime for a dashboard. The change could be a dashboard
@@ -134,7 +134,7 @@ class DashboardDAO(BaseDAO[Dashboard]):
 
     @staticmethod
     def get_dashboard_and_datasets_changed_on(  # pylint: disable=invalid-name
-        id_or_slug_or_dashboard: Union[str, Dashboard]
+        id_or_slug_or_dashboard: str | Dashboard,
     ) -> datetime:
         """
         Get latest changed datetime for a dashboard. The change could be a dashboard
@@ -166,7 +166,7 @@ class DashboardDAO(BaseDAO[Dashboard]):
         return not db.session.query(dashboard_query.exists()).scalar()
 
     @staticmethod
-    def validate_update_slug_uniqueness(dashboard_id: int, slug: Optional[str]) -> bool:
+    def validate_update_slug_uniqueness(dashboard_id: int, slug: str | None) -> bool:
         if slug is not None:
             dashboard_query = db.session.query(Dashboard).filter(
                 Dashboard.slug == slug, Dashboard.id != dashboard_id
@@ -183,16 +183,15 @@ class DashboardDAO(BaseDAO[Dashboard]):
             db.session.commit()
         return model
 
-    @staticmethod
-    def bulk_delete(models: Optional[list[Dashboard]], commit: bool = True) -> None:
-        item_ids = [model.id for model in models] if models else []
+    @classmethod
+    def delete(cls, items: Dashboard | list[Dashboard], commit: bool = True) -> None:
+        item_ids = [item.id for item in get_iterable(items)]
         # bulk delete, first delete related data
-        if models:
-            for model in models:
-                model.slices = []
-                model.owners = []
-                model.embedded = []
-                db.session.merge(model)
+        for item in get_iterable(items):
+            item.slices = []
+            item.owners = []
+            item.embedded = []
+            db.session.merge(item)
         # bulk delete itself
         try:
             db.session.query(Dashboard).filter(Dashboard.id.in_(item_ids)).delete(
@@ -208,7 +207,7 @@ class DashboardDAO(BaseDAO[Dashboard]):
     def set_dash_metadata(  # pylint: disable=too-many-locals
         dashboard: Dashboard,
         data: dict[Any, Any],
-        old_to_new_slice_ids: Optional[dict[int, int]] = None,
+        old_to_new_slice_ids: dict[int, int] | None = None,
         commit: bool = False,
     ) -> Dashboard:
         new_filter_scopes = {}
diff --git a/superset/daos/dataset.py b/superset/daos/dataset.py
index 74b7fa50eb..716fcd9a05 100644
--- a/superset/daos/dataset.py
+++ b/superset/daos/dataset.py
@@ -14,8 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import logging
-from typing import Any, Optional
+from typing import Any
 
 from sqlalchemy.exc import SQLAlchemyError
 
@@ -26,7 +28,7 @@ from superset.extensions import db
 from superset.models.core import Database
 from superset.models.dashboard import Dashboard
 from superset.models.slice import Slice
-from superset.utils.core import DatasourceType
+from superset.utils.core import DatasourceType, get_iterable
 from superset.views.base import DatasourceFilter
 
 logger = logging.getLogger(__name__)
@@ -36,7 +38,7 @@ class DatasetDAO(BaseDAO[SqlaTable]):  # pylint: disable=too-many-public-methods
     base_filter = DatasourceFilter
 
     @staticmethod
-    def get_database_by_id(database_id: int) -> Optional[Database]:
+    def get_database_by_id(database_id: int) -> Database | None:
         try:
             return db.session.query(Database).filter_by(id=database_id).one_or_none()
         except SQLAlchemyError as ex:  # pragma: no cover
@@ -68,7 +70,7 @@ class DatasetDAO(BaseDAO[SqlaTable]):  # pylint: disable=too-many-public-methods
 
     @staticmethod
     def validate_table_exists(
-        database: Database, table_name: str, schema: Optional[str]
+        database: Database, table_name: str, schema: str | None
     ) -> bool:
         try:
             database.get_table(table_name, schema=schema)
@@ -80,9 +82,9 @@ class DatasetDAO(BaseDAO[SqlaTable]):  # pylint: disable=too-many-public-methods
     @staticmethod
     def validate_uniqueness(
         database_id: int,
-        schema: Optional[str],
+        schema: str | None,
         name: str,
-        dataset_id: Optional[int] = None,
+        dataset_id: int | None = None,
     ) -> bool:
         dataset_query = db.session.query(SqlaTable).filter(
             SqlaTable.table_name == name,
@@ -287,9 +289,7 @@ class DatasetDAO(BaseDAO[SqlaTable]):  # pylint: disable=too-many-public-methods
             db.session.commit()
 
     @classmethod
-    def find_dataset_column(
-        cls, dataset_id: int, column_id: int
-    ) -> Optional[TableColumn]:
+    def find_dataset_column(cls, dataset_id: int, column_id: int) -> TableColumn | None:
         # We want to apply base dataset filters
         dataset = DatasetDAO.find_by_id(dataset_id)
         if not dataset:
@@ -319,16 +319,14 @@ class DatasetDAO(BaseDAO[SqlaTable]):  # pylint: disable=too-many-public-methods
         return DatasetColumnDAO.create(properties, commit=commit)
 
     @classmethod
-    def delete_column(cls, model: TableColumn, commit: bool = True) -> TableColumn:
+    def delete_column(cls, model: TableColumn, commit: bool = True) -> None:
         """
         Deletes a Dataset column
         """
-        return cls.delete(model, commit=commit)
+        DatasetColumnDAO.delete(model, commit=commit)
 
     @classmethod
-    def find_dataset_metric(
-        cls, dataset_id: int, metric_id: int
-    ) -> Optional[SqlMetric]:
+    def find_dataset_metric(cls, dataset_id: int, metric_id: int) -> SqlMetric | None:
         # We want to apply base dataset filters
         dataset = DatasetDAO.find_by_id(dataset_id)
         if not dataset:
@@ -336,11 +334,11 @@ class DatasetDAO(BaseDAO[SqlaTable]):  # pylint: disable=too-many-public-methods
         return db.session.query(SqlMetric).get(metric_id)
 
     @classmethod
-    def delete_metric(cls, model: SqlMetric, commit: bool = True) -> SqlMetric:
+    def delete_metric(cls, model: SqlMetric, commit: bool = True) -> None:
         """
         Deletes a Dataset metric
         """
-        return cls.delete(model, commit=commit)
+        DatasetMetricDAO.delete(model, commit=commit)
 
     @classmethod
     def update_metric(
@@ -363,22 +361,33 @@ class DatasetDAO(BaseDAO[SqlaTable]):  # pylint: disable=too-many-public-methods
         return DatasetMetricDAO.create(properties, commit=commit)
 
     @classmethod
-    def bulk_delete(
-        cls, models: Optional[list[SqlaTable]], commit: bool = True
+    def delete(
+        cls,
+        items: SqlaTable | list[SqlaTable],
+        commit: bool = True,
     ) -> None:
-        item_ids = [model.id for model in models] if models else []
-        # bulk delete itself
+        """
+        Delete the specified item(s) including their associated relationships.
+
+        Note that bulk deletion via `delete` does not dispatch the `after_delete` event
+        and thus the ORM event listener callback needs to be invoked manually.
+
+        :param items: The item(s) to delete
+        :param commit: Whether to commit the transaction
+        :raises DAODeleteFailedError: If the deletion failed
+        :see: https://docs.sqlalchemy.org/en/latest/orm/queryguide/dml.html
+        """
+
         try:
-            db.session.query(SqlaTable).filter(SqlaTable.id.in_(item_ids)).delete(
-                synchronize_session="fetch"
-            )
+            db.session.query(SqlaTable).filter(
+                SqlaTable.id.in_(item.id for item in get_iterable(items))
+            ).delete(synchronize_session="fetch")
 
-            if models:
-                connection = db.session.connection()
-                mapper = next(iter(cls.model_cls.registry.mappers))  # type: ignore
+            connection = db.session.connection()
+            mapper = next(iter(cls.model_cls.registry.mappers))  # type: ignore
 
-                for model in models:
-                    security_manager.dataset_after_delete(mapper, connection, model)
+            for item in get_iterable(items):
+                security_manager.dataset_after_delete(mapper, connection, item)
 
             if commit:
                 db.session.commit()
@@ -388,7 +397,7 @@ class DatasetDAO(BaseDAO[SqlaTable]):  # pylint: disable=too-many-public-methods
             raise ex
 
     @staticmethod
-    def get_table_by_name(database_id: int, table_name: str) -> Optional[SqlaTable]:
+    def get_table_by_name(database_id: int, table_name: str) -> SqlaTable | None:
         return (
             db.session.query(SqlaTable)
             .filter_by(database_id=database_id, table_name=table_name)
diff --git a/superset/daos/query.py b/superset/daos/query.py
index 80b5d1ad4e..ea7c82cc34 100644
--- a/superset/daos/query.py
+++ b/superset/daos/query.py
@@ -16,14 +16,11 @@
 # under the License.
 import logging
 from datetime import datetime
-from typing import Any, Optional, Union
-
-from sqlalchemy.exc import SQLAlchemyError
+from typing import Any, Union
 
 from superset import sql_lab
 from superset.common.db_query_status import QueryStatus
 from superset.daos.base import BaseDAO
-from superset.daos.exceptions import DAODeleteFailedError
 from superset.exceptions import QueryNotFoundException, SupersetCancelQueryException
 from superset.extensions import db
 from superset.models.sql_lab import Query, SavedQuery
@@ -105,16 +102,3 @@ class QueryDAO(BaseDAO[Query]):
 
 class SavedQueryDAO(BaseDAO[SavedQuery]):
     base_filter = SavedQueryFilter
-
-    @staticmethod
-    def bulk_delete(models: Optional[list[SavedQuery]], commit: bool = True) -> None:
-        item_ids = [model.id for model in models] if models else []
-        try:
-            db.session.query(SavedQuery).filter(SavedQuery.id.in_(item_ids)).delete(
-                synchronize_session="fetch"
-            )
-            if commit:
-                db.session.commit()
-        except SQLAlchemyError as ex:
-            db.session.rollback()
-            raise DAODeleteFailedError() from ex
diff --git a/superset/daos/report.py b/superset/daos/report.py
index 70a87a6454..b753270122 100644
--- a/superset/daos/report.py
+++ b/superset/daos/report.py
@@ -14,10 +14,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import json
 import logging
 from datetime import datetime
-from typing import Any, Optional
+from typing import Any
 
 from flask_appbuilder import Model
 from sqlalchemy.exc import SQLAlchemyError
@@ -34,7 +36,7 @@ from superset.reports.models import (
     ReportScheduleType,
     ReportState,
 )
-from superset.utils.core import get_user_id
+from superset.utils.core import get_iterable, get_user_id
 
 logger = logging.getLogger(__name__)
 
@@ -93,11 +95,13 @@ class ReportScheduleDAO(BaseDAO[ReportSchedule]):
             .all()
         )
 
-    @staticmethod
-    def bulk_delete(
-        models: Optional[list[ReportSchedule]], commit: bool = True
+    @classmethod
+    def delete(
+        cls,
+        items: ReportSchedule | list[ReportSchedule],
+        commit: bool = True,
     ) -> None:
-        item_ids = [model.id for model in models] if models else []
+        item_ids = [item.id for item in get_iterable(items)]
         try:
             # Clean owners secondary table
             report_schedules = (
@@ -117,7 +121,7 @@ class ReportScheduleDAO(BaseDAO[ReportSchedule]):
 
     @staticmethod
     def validate_unique_creation_method(
-        dashboard_id: Optional[int] = None, chart_id: Optional[int] = None
+        dashboard_id: int | None = None, chart_id: int | None = None
     ) -> bool:
         """
         Validate if the user already has a chart or dashboard
@@ -135,7 +139,7 @@ class ReportScheduleDAO(BaseDAO[ReportSchedule]):
 
     @staticmethod
     def validate_update_uniqueness(
-        name: str, report_type: ReportScheduleType, expect_id: Optional[int] = None
+        name: str, report_type: ReportScheduleType, expect_id: int | None = None
     ) -> bool:
         """
         Validate if this name and type is unique.
@@ -218,7 +222,7 @@ class ReportScheduleDAO(BaseDAO[ReportSchedule]):
             raise DAOCreateFailedError(str(ex)) from ex
 
     @staticmethod
-    def find_active(session: Optional[Session] = None) -> list[ReportSchedule]:
+    def find_active(session: Session | None = None) -> list[ReportSchedule]:
         """
         Find all active reports. If session is passed it will be used instead of the
         default `db.session`, this is useful when on a celery worker session context
@@ -231,8 +235,8 @@ class ReportScheduleDAO(BaseDAO[ReportSchedule]):
     @staticmethod
     def find_last_success_log(
         report_schedule: ReportSchedule,
-        session: Optional[Session] = None,
-    ) -> Optional[ReportExecutionLog]:
+        session: Session | None = None,
+    ) -> ReportExecutionLog | None:
         """
         Finds last success execution log for a given report
         """
@@ -250,8 +254,8 @@ class ReportScheduleDAO(BaseDAO[ReportSchedule]):
     @staticmethod
     def find_last_entered_working_log(
         report_schedule: ReportSchedule,
-        session: Optional[Session] = None,
-    ) -> Optional[ReportExecutionLog]:
+        session: Session | None = None,
+    ) -> ReportExecutionLog | None:
         """
         Finds last success execution log for a given report
         """
@@ -270,8 +274,8 @@ class ReportScheduleDAO(BaseDAO[ReportSchedule]):
     @staticmethod
     def find_last_error_notification(
         report_schedule: ReportSchedule,
-        session: Optional[Session] = None,
-    ) -> Optional[ReportExecutionLog]:
+        session: Session | None = None,
+    ) -> ReportExecutionLog | None:
         """
         Finds last error email sent
         """
@@ -307,9 +311,9 @@ class ReportScheduleDAO(BaseDAO[ReportSchedule]):
     def bulk_delete_logs(
         model: ReportSchedule,
         from_date: datetime,
-        session: Optional[Session] = None,
+        session: Session | None = None,
         commit: bool = True,
-    ) -> Optional[int]:
+    ) -> int | None:
         session = session or db.session
         try:
             row_count = (
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 3b8b70e167..781ad6f6c7 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -1364,7 +1364,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
               $ref: '#/components/responses/500'
         """
         for embedded in dashboard.embedded:
-            DashboardDAO.delete(embedded)
+            EmbeddedDashboardDAO.delete(embedded)
         return self.response(200, message="OK")
 
     @expose("/<id_or_slug>/copy/", methods=("POST",))
diff --git a/superset/dashboards/commands/bulk_delete.py b/superset/dashboards/commands/bulk_delete.py
index 4802c9f101..707f0d722a 100644
--- a/superset/dashboards/commands/bulk_delete.py
+++ b/superset/dashboards/commands/bulk_delete.py
@@ -43,9 +43,10 @@ class BulkDeleteDashboardCommand(BaseCommand):
 
     def run(self) -> None:
         self.validate()
+        assert self._models
+
         try:
-            DashboardDAO.bulk_delete(self._models)
-            return None
+            DashboardDAO.delete(self._models)
         except DeleteFailedError as ex:
             logger.exception(ex.exception)
             raise DashboardBulkDeleteFailedError() from ex
diff --git a/superset/dashboards/commands/delete.py b/superset/dashboards/commands/delete.py
index 1f5eb4ae3b..13b92977df 100644
--- a/superset/dashboards/commands/delete.py
+++ b/superset/dashboards/commands/delete.py
@@ -17,7 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
 from flask_babel import lazy_gettext as _
 
 from superset import security_manager
@@ -42,16 +41,15 @@ class DeleteDashboardCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[Dashboard] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
         assert self._model
 
         try:
-            dashboard = DashboardDAO.delete(self._model)
+            DashboardDAO.delete(self._model)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise DashboardDeleteFailedError() from ex
-        return dashboard
 
     def validate(self) -> None:
         # Validate/populate model exists
diff --git a/superset/dashboards/filter_sets/api.py b/superset/dashboards/filter_sets/api.py
index 11291b91cc..f735404cd4 100644
--- a/superset/dashboards/filter_sets/api.py
+++ b/superset/dashboards/filter_sets/api.py
@@ -374,8 +374,8 @@ class FilterSetRestApi(BaseSupersetModelRestApi):
               $ref: '#/components/responses/500'
         """
         try:
-            changed_model = DeleteFilterSetCommand(dashboard_id, pk).run()
-            return self.response(200, id=changed_model.id)
+            DeleteFilterSetCommand(dashboard_id, pk).run()
+            return self.response(200, id=dashboard_id)
         except ValidationError as error:
             return self.response_400(message=error.messages)
         except FilterSetNotFoundError:
diff --git a/superset/dashboards/filter_sets/commands/delete.py b/superset/dashboards/filter_sets/commands/delete.py
index c058354245..1ae5f7cda3 100644
--- a/superset/dashboards/filter_sets/commands/delete.py
+++ b/superset/dashboards/filter_sets/commands/delete.py
@@ -16,8 +16,6 @@
 # under the License.
 import logging
 
-from flask_appbuilder.models.sqla import Model
-
 from superset.daos.dashboard import FilterSetDAO
 from superset.daos.exceptions import DAODeleteFailedError
 from superset.dashboards.filter_sets.commands.base import BaseFilterSetCommand
@@ -35,12 +33,12 @@ class DeleteFilterSetCommand(BaseFilterSetCommand):
         super().__init__(dashboard_id)
         self._filter_set_id = filter_set_id
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
         assert self._filter_set
 
         try:
-            return FilterSetDAO.delete(self._filter_set, commit=True)
+            FilterSetDAO.delete(self._filter_set)
         except DAODeleteFailedError as err:
             raise FilterSetDeleteFailedError(str(self._filter_set_id), "") from err
 
diff --git a/superset/databases/commands/delete.py b/superset/databases/commands/delete.py
index 95d212e290..b7a86d6ef1 100644
--- a/superset/databases/commands/delete.py
+++ b/superset/databases/commands/delete.py
@@ -17,7 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
 from flask_babel import lazy_gettext as _
 
 from superset.commands.base import BaseCommand
@@ -40,16 +39,15 @@ class DeleteDatabaseCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[Database] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
         assert self._model
 
         try:
-            database = DatabaseDAO.delete(self._model)
+            DatabaseDAO.delete(self._model)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise DatabaseDeleteFailedError() from ex
-        return database
 
     def validate(self) -> None:
         # Validate/populate model exists
diff --git a/superset/databases/ssh_tunnel/commands/delete.py b/superset/databases/ssh_tunnel/commands/delete.py
index 375c496f2a..04d6e68338 100644
--- a/superset/databases/ssh_tunnel/commands/delete.py
+++ b/superset/databases/ssh_tunnel/commands/delete.py
@@ -17,8 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
-
 from superset import is_feature_enabled
 from superset.commands.base import BaseCommand
 from superset.daos.database import SSHTunnelDAO
@@ -38,17 +36,16 @@ class DeleteSSHTunnelCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[SSHTunnel] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         if not is_feature_enabled("SSH_TUNNELING"):
             raise SSHTunnelingNotEnabledError()
         self.validate()
         assert self._model
 
         try:
-            ssh_tunnel = SSHTunnelDAO.delete(self._model)
+            SSHTunnelDAO.delete(self._model)
         except DAODeleteFailedError as ex:
             raise SSHTunnelDeleteFailedError() from ex
-        return ssh_tunnel
 
     def validate(self) -> None:
         # Validate/populate model exists
diff --git a/superset/datasets/columns/commands/delete.py b/superset/datasets/columns/commands/delete.py
index 6ff8c21d7d..d377141e13 100644
--- a/superset/datasets/columns/commands/delete.py
+++ b/superset/datasets/columns/commands/delete.py
@@ -17,8 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
-
 from superset import security_manager
 from superset.commands.base import BaseCommand
 from superset.connectors.sqla.models import TableColumn
@@ -40,13 +38,12 @@ class DeleteDatasetColumnCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[TableColumn] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
+        assert self._model
+
         try:
-            if not self._model:
-                raise DatasetColumnNotFoundError()
-            column = DatasetDAO.delete_column(self._model)
-            return column
+            DatasetDAO.delete_column(self._model)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise DatasetColumnDeleteFailedError() from ex
diff --git a/superset/datasets/commands/bulk_delete.py b/superset/datasets/commands/bulk_delete.py
index 6937dd20b7..48515bbc6a 100644
--- a/superset/datasets/commands/bulk_delete.py
+++ b/superset/datasets/commands/bulk_delete.py
@@ -42,7 +42,7 @@ class BulkDeleteDatasetCommand(BaseCommand):
         assert self._models
 
         try:
-            DatasetDAO.bulk_delete(self._models)
+            DatasetDAO.delete(self._models)
         except DeleteFailedError as ex:
             logger.exception(ex.exception)
             raise DatasetBulkDeleteFailedError() from ex
diff --git a/superset/datasets/commands/delete.py b/superset/datasets/commands/delete.py
index 1c2147b959..3e7fc7d5a3 100644
--- a/superset/datasets/commands/delete.py
+++ b/superset/datasets/commands/delete.py
@@ -17,8 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
-
 from superset import security_manager
 from superset.commands.base import BaseCommand
 from superset.connectors.sqla.models import SqlaTable
@@ -39,7 +37,7 @@ class DeleteDatasetCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[SqlaTable] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
         assert self._model
 
diff --git a/superset/datasets/metrics/commands/delete.py b/superset/datasets/metrics/commands/delete.py
index 5e7b2144c0..2e1f0897b9 100644
--- a/superset/datasets/metrics/commands/delete.py
+++ b/superset/datasets/metrics/commands/delete.py
@@ -17,8 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
-
 from superset import security_manager
 from superset.commands.base import BaseCommand
 from superset.connectors.sqla.models import SqlMetric
@@ -40,13 +38,12 @@ class DeleteDatasetMetricCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[SqlMetric] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
+        assert self._model
+
         try:
-            if not self._model:
-                raise DatasetMetricNotFoundError()
-            column = DatasetDAO.delete_metric(self._model)
-            return column
+            DatasetDAO.delete_metric(self._model)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise DatasetMetricDeleteFailedError() from ex
diff --git a/superset/queries/saved_queries/commands/bulk_delete.py b/superset/queries/saved_queries/commands/bulk_delete.py
index ba01bd456a..4e68f6b79d 100644
--- a/superset/queries/saved_queries/commands/bulk_delete.py
+++ b/superset/queries/saved_queries/commands/bulk_delete.py
@@ -36,9 +36,10 @@ class BulkDeleteSavedQueryCommand(BaseCommand):
 
     def run(self) -> None:
         self.validate()
+        assert self._models
+
         try:
-            SavedQueryDAO.bulk_delete(self._models)
-            return None
+            SavedQueryDAO.delete(self._models)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise SavedQueryBulkDeleteFailedError() from ex
diff --git a/superset/reports/commands/bulk_delete.py b/superset/reports/commands/bulk_delete.py
index a3644d9fb4..3e3df3d100 100644
--- a/superset/reports/commands/bulk_delete.py
+++ b/superset/reports/commands/bulk_delete.py
@@ -39,9 +39,10 @@ class BulkDeleteReportScheduleCommand(BaseCommand):
 
     def run(self) -> None:
         self.validate()
+        assert self._models
+
         try:
-            ReportScheduleDAO.bulk_delete(self._models)
-            return None
+            ReportScheduleDAO.delete(self._models)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise ReportScheduleBulkDeleteFailedError() from ex
diff --git a/superset/reports/commands/delete.py b/superset/reports/commands/delete.py
index f52d96f7f5..4f9be93f8a 100644
--- a/superset/reports/commands/delete.py
+++ b/superset/reports/commands/delete.py
@@ -17,8 +17,6 @@
 import logging
 from typing import Optional
 
-from flask_appbuilder.models.sqla import Model
-
 from superset import security_manager
 from superset.commands.base import BaseCommand
 from superset.daos.exceptions import DAODeleteFailedError
@@ -39,16 +37,15 @@ class DeleteReportScheduleCommand(BaseCommand):
         self._model_id = model_id
         self._model: Optional[ReportSchedule] = None
 
-    def run(self) -> Model:
+    def run(self) -> None:
         self.validate()
         assert self._model
 
         try:
-            report_schedule = ReportScheduleDAO.delete(self._model)
+            ReportScheduleDAO.delete(self._model)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise ReportScheduleDeleteFailedError() from ex
-        return report_schedule
 
     def validate(self) -> None:
         # Validate/populate model exists
diff --git a/superset/row_level_security/commands/bulk_delete.py b/superset/row_level_security/commands/bulk_delete.py
index f180d0b2b8..f0c8dddabc 100644
--- a/superset/row_level_security/commands/bulk_delete.py
+++ b/superset/row_level_security/commands/bulk_delete.py
@@ -37,9 +37,7 @@ class BulkDeleteRLSRuleCommand(BaseCommand):
     def run(self) -> None:
         self.validate()
         try:
-            RLSDAO.bulk_delete(self._models)
-
-            return None
+            RLSDAO.delete(self._models)
         except DAODeleteFailedError as ex:
             logger.exception(ex.exception)
             raise RuleBulkDeleteFailedError() from ex
diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py
index d31b8f0b28..97c4800478 100644
--- a/tests/integration_tests/datasets/api_tests.py
+++ b/tests/integration_tests/datasets/api_tests.py
@@ -1591,7 +1591,7 @@ class TestDatasetApi(SupersetTestCase):
         assert rv.status_code == 403
 
     @pytest.mark.usefixtures("create_datasets")
-    @patch("superset.daos.dataset.DatasetDAO.delete")
+    @patch("superset.daos.dataset.DatasetColumnDAO.delete")
     def test_delete_dataset_column_fail(self, mock_dao_delete):
         """
         Dataset API: Test delete dataset column
@@ -1671,7 +1671,7 @@ class TestDatasetApi(SupersetTestCase):
         assert rv.status_code == 403
 
     @pytest.mark.usefixtures("create_datasets")
-    @patch("superset.daos.dataset.DatasetDAO.delete")
+    @patch("superset.daos.dataset.DatasetMetricDAO.delete")
     def test_delete_dataset_metric_fail(self, mock_dao_delete):
         """
         Dataset API: Test delete dataset metric