Posted to commits@superset.apache.org by hu...@apache.org on 2020/11/27 20:59:19 UTC

[incubator-superset] 01/01: API changes needed for SQL Lab to Explore improvements

This is an automated email from the ASF dual-hosted git repository.

hugh pushed a commit to branch so-1117-api
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git

commit b49d93574368fab2ad9157ecea3d48234541f670
Author: hughhhh <hu...@gmail.com>
AuthorDate: Fri Nov 27 12:58:24 2020 -0800

    API changes needed for SQL Lab to Explore improvements
---
 superset/datasets/api.py             | 32 +++++++++++++++++---------------
 superset/datasets/commands/update.py | 33 +++++++++++++++++++++++----------
 superset/datasets/dao.py             | 15 +++++++++++++--
 3 files changed, 53 insertions(+), 27 deletions(-)
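For context on the change below: the dataset PUT endpoint gains an optional
"override_column" query flag that replaces a dataset's columns outright
instead of merging them with the existing list. A minimal client sketch,
assuming a locally running Superset and a previously obtained access token
(the host, token, dataset id, and payload are illustrative, not part of
this commit):

    import requests

    BASE = "http://localhost:8088/api/v1/dataset"  # hypothetical host
    headers = {"Authorization": "Bearer <access_token>"}

    # Replace the dataset's columns wholesale rather than merging them.
    resp = requests.put(
        f"{BASE}/42",
        params={"override_column": "true"},
        headers=headers,
        json={"columns": [{"column_name": "ds", "type": "VARCHAR"}]},
    )
    resp.raise_for_status()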

diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index e5ddae5..a2f10f9 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -27,9 +27,8 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface
 from flask_babel import ngettext
 from marshmallow import ValidationError
 
-from superset import event_logger, is_feature_enabled
+from superset import is_feature_enabled
 from superset.commands.exceptions import CommandInvalidError
-from superset.commands.importers.v1.utils import remove_root
 from superset.connectors.sqla.models import SqlaTable
 from superset.constants import RouteMethod
 from superset.databases.filters import DatabaseFilter
@@ -41,7 +40,6 @@ from superset.datasets.commands.exceptions import (
     DatasetCreateFailedError,
     DatasetDeleteFailedError,
     DatasetForbiddenError,
-    DatasetImportError,
     DatasetInvalidError,
     DatasetNotFoundError,
     DatasetRefreshFailedError,
@@ -183,7 +181,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @protect()
     @safe
     @statsd_metrics
-    @event_logger.log_this_with_context(log_to_statsd=False)
     def post(self) -> Response:
         """Creates a new Dataset
         ---
@@ -240,7 +237,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @protect()
     @safe
     @statsd_metrics
-    @event_logger.log_this_with_context(log_to_statsd=False)
     def put(self, pk: int) -> Response:
         """Changes a Dataset
         ---
@@ -252,6 +248,10 @@ class DatasetRestApi(BaseSupersetModelRestApi):
             schema:
               type: integer
             name: pk
+          - in: query
+            schema:
+              type: boolean
+            name: override_column
           requestBody:
             description: Dataset schema
             required: true
@@ -284,6 +284,11 @@ class DatasetRestApi(BaseSupersetModelRestApi):
             500:
               $ref: '#/components/responses/500'
         """
+        # Query string values arrive as strings, so compare explicitly;
+        # a literal "false" would otherwise be treated as truthy.
+        override_column = (
+            request.args.get("override_column", "false").lower() == "true"
+        )
         if not request.is_json:
             return self.response_400(message="Request is not JSON")
         try:
@@ -292,7 +297,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         except ValidationError as error:
             return self.response_400(message=error.messages)
         try:
-            changed_model = UpdateDatasetCommand(g.user, pk, item).run()
+            changed_model = UpdateDatasetCommand(
+                g.user, pk, item, override_column
+            ).run()
             response = self.response(200, id=changed_model.id, result=item)
         except DatasetNotFoundError:
             response = self.response_404()
@@ -311,7 +318,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @protect()
     @safe
     @statsd_metrics
-    @event_logger.log_this_with_context(log_to_statsd=False)
     def delete(self, pk: int) -> Response:
         """Deletes a Dataset
         ---
@@ -362,7 +368,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @safe
     @statsd_metrics
     @rison(get_export_ids_schema)
-    @event_logger.log_this_with_context(log_to_statsd=False)
     def export(self, **kwargs: Any) -> Response:
         """Export datasets
         ---
@@ -438,7 +443,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @protect()
     @safe
     @statsd_metrics
-    @event_logger.log_this_with_context(log_to_statsd=False)
     def refresh(self, pk: int) -> Response:
         """Refresh a Dataset
         ---
@@ -488,7 +492,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @protect()
     @safe
     @statsd_metrics
-    @event_logger.log_this_with_context(log_to_statsd=False)
     def related_objects(self, pk: int) -> Response:
         """Get charts and dashboards count associated to a dataset
         ---
@@ -547,7 +550,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @safe
     @statsd_metrics
     @rison(get_delete_ids_schema)
-    @event_logger.log_this_with_context(log_to_statsd=False)
     def bulk_delete(self, **kwargs: Any) -> Response:
         """Delete bulk Datasets
         ---
@@ -607,7 +609,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
     @safe
     @statsd_metrics
     def import_(self) -> Response:
-        """Import dataset(s) with associated databases
+        """Import dataset (s) with associated databases
         ---
         post:
           requestBody:
@@ -635,12 +637,12 @@ class DatasetRestApi(BaseSupersetModelRestApi):
             500:
               $ref: '#/components/responses/500'
         """
-        upload = request.files.get("formData")
+        upload = request.files.get("file")
         if not upload:
             return self.response_400()
         with ZipFile(upload) as bundle:
             contents = {
-                remove_root(file_name): bundle.read(file_name).decode()
+                file_name: bundle.read(file_name).decode()
                 for file_name in bundle.namelist()
             }
 
@@ -651,6 +653,6 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         except CommandInvalidError as exc:
             logger.warning("Import dataset failed")
             return self.response_422(message=exc.normalized_messages())
-        except DatasetImportError as exc:
+        except Exception as exc:  # pylint: disable=broad-except
             logger.exception("Import dataset failed")
             return self.response_500(message=str(exc))
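Note that the import endpoint above now reads the uploaded bundle from the
multipart field "file" instead of "formData". A hedged upload sketch (the
URL and auth header are assumptions, not shown in the diff):

    import requests

    with open("dataset_export.zip", "rb") as bundle:
        resp = requests.post(
            "http://localhost:8088/api/v1/dataset/import/",
            headers={"Authorization": "Bearer <access_token>"},
            files={"file": bundle},  # field was previously "formData"
        )
    resp.raise_for_status()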
diff --git a/superset/datasets/commands/update.py b/superset/datasets/commands/update.py
index dfc3986..809720e 100644
--- a/superset/datasets/commands/update.py
+++ b/superset/datasets/commands/update.py
@@ -16,7 +16,7 @@
 # under the License.
 import logging
 from collections import Counter
 from typing import Any, Dict, List, Optional
 
 from flask_appbuilder.models.sqla import Model
 from flask_appbuilder.security.sqla.models import User
@@ -48,17 +48,28 @@ logger = logging.getLogger(__name__)
 
 
 class UpdateDatasetCommand(BaseCommand):
-    def __init__(self, user: User, model_id: int, data: Dict[str, Any]):
+    def __init__(
+        self,
+        user: User,
+        model_id: int,
+        data: Dict[str, Any],
+        override_columns: bool = False,
+    ):
         self._actor = user
         self._model_id = model_id
         self._properties = data.copy()
         self._model: Optional[SqlaTable] = None
+        self._override_columns = override_columns
 
     def run(self) -> Model:
         self.validate()
         if self._model:
             try:
-                dataset = DatasetDAO.update(self._model, self._properties)
+                dataset = DatasetDAO.update(
+                    self._model,
+                    self._properties,
+                    override_columns=self._override_columns,
+                )
                 return dataset
             except DAOUpdateFailedError as ex:
                 logger.exception(ex.exception)
@@ -123,14 +134,16 @@ class UpdateDatasetCommand(BaseCommand):
             ]
             if not DatasetDAO.validate_columns_exist(self._model_id, columns_ids):
                 exceptions.append(DatasetColumnNotFoundValidationError())
+
             # validate new column names uniqueness
-            columns_names: List[str] = [
-                column["column_name"] for column in columns if "id" not in column
-            ]
-            if not DatasetDAO.validate_columns_uniqueness(
-                self._model_id, columns_names
-            ):
-                exceptions.append(DatasetColumnsExistsValidationError())
+            if not self._override_columns:
+                columns_names: List[str] = [
+                    column["column_name"] for column in columns if "id" not in column
+                ]
+                if not DatasetDAO.validate_columns_uniqueness(
+                    self._model_id, columns_names
+                ):
+                    exceptions.append(DatasetColumnsExistsValidationError())
 
     def _validate_metrics(
         self, metrics: List[Dict[str, Any]], exceptions: List[ValidationError]
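The validation change above skips the column-name uniqueness check when
override_columns is set, since a full column replacement makes clashes with
existing names expected rather than erroneous. A standalone sketch of that
branch (not Superset code; all names here are illustrative):

    from typing import Any, Dict, List

    def validate_new_columns(
        columns: List[Dict[str, Any]],
        existing_names: List[str],
        override_columns: bool = False,
    ) -> List[str]:
        """Return errors for new (id-less) columns, unless overriding."""
        errors: List[str] = []
        if not override_columns:
            new_names = [c["column_name"] for c in columns if "id" not in c]
            duplicates = set(new_names) & set(existing_names)
            if duplicates:
                errors.append(f"duplicate columns: {sorted(duplicates)}")
        return errors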
diff --git a/superset/datasets/dao.py b/superset/datasets/dao.py
index 3b905f4..22526e9 100644
--- a/superset/datasets/dao.py
+++ b/superset/datasets/dao.py
@@ -15,7 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 import logging
 from typing import Any, Dict, List, Optional
 
 from flask import current_app
 from sqlalchemy.exc import SQLAlchemyError
@@ -144,7 +144,11 @@ class DatasetDAO(BaseDAO):
 
     @classmethod
     def update(
-        cls, model: SqlaTable, properties: Dict[str, Any], commit: bool = True
+        cls,
+        model: SqlaTable,
+        properties: Dict[str, Any],
+        commit: bool = True,
+        override_columns: bool = False,
     ) -> Optional[SqlaTable]:
         """
         Updates a Dataset model on the metadata DB
@@ -175,6 +179,13 @@ class DatasetDAO(BaseDAO):
                 new_metrics.append(metric_obj)
             properties["metrics"] = new_metrics
 
+        if override_columns:
+            # Clear existing columns first so the update is a full refresh
+            original_columns = properties["columns"]
+            properties["columns"] = []
+            super().update(model, properties, commit=commit)
+            properties["columns"] = original_columns
+
         return super().update(model, properties, commit=commit)
 
     @classmethod
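The DAO change performs the refresh in two passes: the first update persists
the model with an empty column list, deleting the existing columns, and the
second writes the replacement set, so stale and new columns are never
merged. A generic sketch of the pattern (a hypothetical helper, not Superset
code):

    from typing import Any, Callable, Dict

    def full_refresh_update(
        persist: Callable[[Dict[str, Any]], None],
        properties: Dict[str, Any],
    ) -> None:
        """Persist twice: once with columns cleared, once with the new set."""
        new_columns = properties.get("columns", [])
        properties["columns"] = []
        persist(properties)  # first pass drops the existing columns
        properties["columns"] = new_columns
        persist(properties)  # second pass writes the replacements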