Posted to commits@superset.apache.org by hu...@apache.org on 2022/04/11 21:49:38 UTC

[superset] branch sip68/querying-get-list-dataset-and-get-single created (now 2f4c15ab80)

This is an automated email from the ASF dual-hosted git repository.

hugh pushed a change to branch sip68/querying-get-list-dataset-and-get-single
in repository https://gitbox.apache.org/repos/asf/superset.git


      at 2f4c15ab80 update api with new model

This branch includes the following new commits:

     new 2f4c15ab80 update api with new model

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[superset] 01/01: update api with new model

Posted by hu...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

hugh pushed a commit to branch sip68/querying-get-list-dataset-and-get-single
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 2f4c15ab80669a02c2789ecd2020e7004158ad46
Author: hughhhh <hu...@gmail.com>
AuthorDate: Mon Apr 11 17:49:19 2022 -0400

    update api with new model
---
 superset/datasets/models.py                      |  82 +++++++-
 superset/datasets/sl/api.py                      | 102 ++++++++++
 superset/initialization/__init__.py              |  10 +-
 superset/models/helpers.py                       |  12 +-
 tests/integration_tests/datasets/sl/api_tests.py | 227 +++++++++++++++++++++++
 5 files changed, 418 insertions(+), 15 deletions(-)

diff --git a/superset/datasets/models.py b/superset/datasets/models.py
index 56a6fbf400..382782359f 100644
--- a/superset/datasets/models.py
+++ b/superset/datasets/models.py
@@ -24,13 +24,15 @@ dataset, new models for columns, metrics, and tables were also introduced.
 These models are not fully implemented, and shouldn't be used yet.
 """
 
-from typing import List
+from typing import Any, Dict, List, Optional, Tuple, Type, Union
 
 import sqlalchemy as sa
 from flask_appbuilder import Model
-from sqlalchemy.orm import relationship
+from sqlalchemy.orm import column_property, relationship
 
 from superset.columns.models import Column
+from superset.extensions import db
+from superset.models.core import Database
 from superset.models.helpers import (
     AuditMixinNullable,
     ExtraJSONMixin,
@@ -90,3 +92,79 @@ class Dataset(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin):
     # Column is managed externally and should be read-only inside Superset
     is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False)
     external_url = sa.Column(sa.Text, nullable=True)
+
+    # todo(hugh): Figure out how to use this field and populate it
+    # default_schema = Column()
+
+    # String representing the permissions for a given dataset
+    # todo(hugh): compute these columns based upon the original SqlaTable models
+    # perm = column_property(name)
+    # schema = column_property()
+
+    """
+    Legacy properties used to maintain backwards compatibility with
+    the current API schema
+    """
+
+    @property
+    def datasource_type(self) -> Optional[str]:
+        return self.__tablename__
+
+    @property
+    def kind(self) -> Optional[str]:
+        # https://github.com/apache/superset/blob/79a7a5d1b1682f79f1aab1723f76a34dcb9bf030/superset/connectors/base/models.py#L121
+        return "virtual" if self.is_physical else "physical"
+
+    @property
+    def sql(self) -> Optional[str]:
+        return self.expression
+
+    @property
+    def table_name(self) -> Optional[str]:
+        return self.name
+
+    @property
+    def explore_url(self) -> Optional[str]:
+        return f"/superset/explore/{self.type}/{self.id}/"
+
+    @property
+    def changed_by_url(self) -> Optional[str]:
+        return "todo"
+
+    @property
+    def default_endpoint(self) -> Optional[str]:
+        return "todo"
+
+    @property
+    def description(self) -> Optional[str]:
+        return "todo"
+
+    @property
+    def database(self) -> Optional[Dict[str, Any]]:
+        if self.tables:
+            database = (
+                db.session.query(Database)
+                .filter(Database.id == self.tables[0].database_id)
+                .one()
+            )
+            return database.data
+        return None
+
+    @property
+    def schema(self) -> Optional[str]:
+        # todo(hugh): a dataset may span tables in multiple schemas; for
+        # now surface the schema of the first associated table
+        if self.tables:
+            return self.tables[0].schema
+        return "default"
+
+    @property
+    def owners(self) -> Optional[List[int]]:
+        return []
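
The legacy properties above exist so the new Dataset model can be
serialized under the same field names the old SqlaTable-based API
exposed. A minimal sketch of reading them, assuming the models in this
commit (a hypothetical in-memory instance; constructor arguments mirror
the test fixtures later in this patch):

    from superset.datasets.models import Dataset

    # Hypothetical in-memory instance; is_physical is unset, so the
    # dataset reads as virtual.
    dataset = Dataset(name="position", expression="SELECT 1")

    assert dataset.table_name == "position"  # aliases Dataset.name
    assert dataset.sql == "SELECT 1"         # aliases Dataset.expression
    assert dataset.kind == "virtual"         # derived from is_physical
    assert dataset.owners == []              # stub until owners land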
diff --git a/superset/datasets/sl/api.py b/superset/datasets/sl/api.py
new file mode 100644
index 0000000000..6aa2fbe5f6
--- /dev/null
+++ b/superset/datasets/sl/api.py
@@ -0,0 +1,102 @@
+from typing import Any, Set
+
+from flask_appbuilder.models.sqla.interface import SQLAInterface
+from flask_babel import lazy_gettext as _
+from sqlalchemy import or_
+
+from superset import security_manager
+from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
+from superset.datasets.filters import (
+    DatasetIsNullOrEmptyFilter,
+    DatasetIsPhysicalOrVirtual,
+)
+from superset.datasets.models import Dataset, table_association_table
+from superset.models.core import Database
+from superset.models.sql_lab import Query
+from superset.tables.models import Table
+from superset.views.base import BaseFilter, DatasourceFilter
+from superset.views.base_api import BaseSupersetModelRestApi, RelatedFieldFilter
+
+
+class DatasetAllTextFilter(BaseFilter):  # pylint: disable=too-few-public-methods
+    name = _("All Text")
+    arg_name = "dataset_all_text"
+
+    def apply(self, query: Query, value: Any) -> Query:
+        if not value:
+            return query
+        ilike_value = f"%{value}%"
+        return query.filter(
+            or_(
+                Dataset.name.ilike(ilike_value),
+                Dataset.expression.ilike(ilike_value),
+            )
+        )
+
+
+# example rison: (filters:!((col:tables,opr:schema,value:public)),order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25)
+class DatasetSchemaFilter(BaseFilter):
+    name = _("Schema")
+    arg_name = "schema"
+
+    def apply(self, query: Query, value: Any) -> Query:
+        if not value:
+            return query
+
+        filter_clause = (
+            (table_association_table.c.dataset_id == Dataset.id)
+            & (table_association_table.c.table_id == Table.id)
+            & (Table.schema == value)
+        )
+        return query.join(table_association_table).join(Table).filter(filter_clause)
+
+
+class DatasetDatabaseFilter(BaseFilter):
+    name = _("Database")
+    arg_name = "db"
+
+    def apply(self, query: Query, value: Any) -> Query:
+        if not value:
+            return query
+
+        filter_clause = (
+            (table_association_table.c.dataset_id == Dataset.id)
+            & (table_association_table.c.table_id == Table.id)
+            & (Table.database_id == value)
+        )
+        return query.join(table_association_table).join(Table).filter(filter_clause)
+
+
+class SLDatasetRestApi(BaseSupersetModelRestApi):
+    datamodel = SQLAInterface(Dataset)
+    # todo(hugh): this should be a DatasetFilter instead of Datasource (security)
+    #  base_filters = [["id", DatasourceFilter, lambda: []]]
+
+    resource_name = "datasets"
+    allow_browser_login = True
+    class_permission_name = "Dataset"
+    method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
+    list_columns = [
+        "changed_by",
+        "changed_by_name",
+        "changed_by_url",
+        "changed_on_delta_humanized",
+        "database",
+        "datasource_type",
+        "default_endpoint",
+        "description",
+        "explore_url",
+        "extra",
+        "id",
+        "kind",
+        "owners",
+        "schema",
+        "sql",
+    ]
+    order_columns = ["changed_on_delta_humanized", "schema"]
+    search_columns = {"expression", "name", "tables"}
+    search_filters = {
+        "expression": [DatasetIsPhysicalOrVirtual],
+        "name": [DatasetAllTextFilter],
+        "tables": [DatasetSchemaFilter, DatasetDatabaseFilter],
+    }
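
The two relationship filters register under the "tables" search column,
so clients select them by their arg_name ("schema" or "db") inside the
rison-encoded q argument, as in the example rison comment above. A
sketch of building such a request URL with prison, the same encoder the
integration tests below use (the filter values are illustrative):

    import prison

    # Restrict the dataset list to datasets whose underlying table
    # lives in a given schema; "opr" selects the filter's arg_name.
    query = {
        "filters": [{"col": "tables", "opr": "schema", "value": "public"}],
        "order_column": "changed_on_delta_humanized",
        "order_direction": "desc",
        "page": 0,
        "page_size": 25,
    }
    uri = f"api/v1/datasets/?q={prison.dumps(query)}"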
diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py
index 74b05e1688..74034f31e5 100644
--- a/superset/initialization/__init__.py
+++ b/superset/initialization/__init__.py
@@ -141,7 +141,7 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
         from superset.datasets.api import DatasetRestApi
         from superset.datasets.columns.api import DatasetColumnsRestApi
         from superset.datasets.metrics.api import DatasetMetricRestApi
-        from superset.embedded.view import EmbeddedView
+        from superset.datasets.sl.api import SLDatasetRestApi
         from superset.explore.form_data.api import ExploreFormDataRestApi
         from superset.explore.permalink.api import ExplorePermalinkRestApi
         from superset.importexport.api import ImportExportRestApi
@@ -151,7 +151,7 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
         from superset.reports.logs.api import ReportExecutionLogRestApi
         from superset.security.api import SecurityRestApi
         from superset.views.access_requests import AccessRequestsModelView
-        from superset.views.alerts import AlertView, ReportView
+        from superset.views.alerts import AlertView
         from superset.views.annotations import (
             AnnotationLayerModelView,
             AnnotationModelView,
@@ -206,6 +206,7 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
         appbuilder.add_api(DashboardRestApi)
         appbuilder.add_api(DatabaseRestApi)
         appbuilder.add_api(DatasetRestApi)
+        appbuilder.add_api(SLDatasetRestApi)
         appbuilder.add_api(DatasetColumnsRestApi)
         appbuilder.add_api(DatasetMetricRestApi)
         appbuilder.add_api(ExploreFormDataRestApi)
@@ -277,6 +278,9 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
             category="Security",
             category_label=__("Security"),
             icon="fa-lock",
+            menu_cond=lambda: feature_flag_manager.is_feature_enabled(
+                "ROW_LEVEL_SECURITY"
+            ),
         )
 
         #
@@ -290,7 +294,6 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
         appbuilder.add_view_no_menu(Dashboard)
         appbuilder.add_view_no_menu(DashboardModelViewAsync)
         appbuilder.add_view_no_menu(Datasource)
-        appbuilder.add_view_no_menu(EmbeddedView)
         appbuilder.add_view_no_menu(KV)
         appbuilder.add_view_no_menu(R)
         appbuilder.add_view_no_menu(SavedQueryView)
@@ -445,7 +448,6 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
                 and self.config["DRUID_METADATA_LINKS_ENABLED"]
             ),
         )
-        appbuilder.add_view_no_menu(ReportView)
         appbuilder.add_link(
             "Refresh Druid Metadata",
             label=__("Refresh Druid Metadata"),
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index baa0566c01..090f152d72 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -420,10 +420,6 @@ class AuditMixinNullable(AuditMixin):
     def changed_on_delta_humanized(self) -> str:
         return self.changed_on_humanized
 
-    @renders("created_on")
-    def created_on_delta_humanized(self) -> str:
-        return self.created_on_humanized
-
     @renders("changed_on")
     def changed_on_utc(self) -> str:
         # Convert naive datetime to UTC
@@ -431,11 +427,9 @@ class AuditMixinNullable(AuditMixin):
 
     @property
     def changed_on_humanized(self) -> str:
-        return humanize.naturaltime(datetime.now() - self.changed_on)
-
-    @property
-    def created_on_humanized(self) -> str:
-        return humanize.naturaltime(datetime.now() - self.created_on)
+        if self.changed_on:
+            return humanize.naturaltime(datetime.now() - self.changed_on)
+        return humanize.naturaltime(self.created_on)
 
     @renders("changed_on")
     def modified(self) -> Markup:
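
With created_on_delta_humanized removed, changed_on_humanized becomes
the single humanized timestamp and falls back to created_on when a row
has never been changed, so changed_on_delta_humanized no longer breaks
on a None changed_on. A quick illustration of the two humanize call
shapes involved (both a timedelta and a bare datetime are accepted):

    from datetime import datetime, timedelta

    import humanize

    changed_on = datetime.now() - timedelta(minutes=3)
    print(humanize.naturaltime(datetime.now() - changed_on))  # "3 minutes ago"

    # naturaltime also accepts a datetime, measured relative to now
    print(humanize.naturaltime(datetime.now() - timedelta(days=1)))  # "a day ago"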
diff --git a/tests/integration_tests/datasets/sl/api_tests.py b/tests/integration_tests/datasets/sl/api_tests.py
new file mode 100644
index 0000000000..f6e29adc21
--- /dev/null
+++ b/tests/integration_tests/datasets/sl/api_tests.py
@@ -0,0 +1,227 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Unit tests for Superset"""
+import json
+import unittest
+from io import BytesIO
+from typing import List, Optional
+from unittest.mock import patch
+from zipfile import is_zipfile, ZipFile
+
+import prison
+import pytest
+import yaml
+from sqlalchemy.sql import func
+
+from superset.columns.models import Column
+from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
+from superset.dao.exceptions import (
+    DAOCreateFailedError,
+    DAODeleteFailedError,
+    DAOUpdateFailedError,
+)
+from superset.datasets.models import Dataset
+from superset.extensions import db, security_manager
+from superset.models.core import Database
+from superset.tables.models import Table
+from superset.utils.core import backend, get_example_default_schema
+from superset.utils.database import get_example_database, get_main_database
+from superset.utils.dict_import_export import export_to_dict
+from tests.integration_tests.base_tests import SupersetTestCase
+from tests.integration_tests.conftest import CTAS_SCHEMA_NAME
+
+
+class SLTestDatasetApi(SupersetTestCase):
+    def create_table(self):
+        pass
+
+    def create_datasets(self):
+        pass
+
+    def insert_dataset(self):
+        database = Database(database_name="db1", sqlalchemy_uri="sqlite://")
+        database1 = Database(database_name="db2", sqlalchemy_uri="sqlite://")
+
+        table = Table(
+            name="a",
+            schema="schema1",
+            catalog="my_catalog",
+            database=database,
+            columns=[
+                Column(name="longitude", expression="longitude", type="test"),
+                Column(name="latitude", expression="latitude", type="test"),
+            ],
+        )
+
+        dataset = Dataset(
+            name="position",
+            expression="""
+            SELECT array_agg(array[longitude,latitude]) AS position
+            FROM my_catalog.my_schema.my_table
+            """,
+            tables=[table],
+            columns=[
+                Column(
+                    name="position",
+                    expression="array_agg(array[longitude,latitude])",
+                    type="test",
+                ),
+            ],
+        )
+
+        table1 = Table(
+            name="b",
+            schema="schema2",
+            catalog="my_catalog",
+            database=database1,
+            columns=[
+                Column(name="longitude", expression="longitude", type="test"),
+                Column(name="latitude", expression="latitude", type="test"),
+            ],
+        )
+
+        dataset1 = Dataset(
+            name="position2",
+            expression="""
+            SELECT array_agg(array[longitude,latitude]) AS position
+            FROM my_catalog.my_schema.my_table
+            """,
+            tables=[table1],
+            columns=[
+                Column(
+                    name="position",
+                    expression="array_agg(array[longitude,latitude])",
+                    type="test",
+                ),
+            ],
+        )
+
+        db.session.add(database)
+        db.session.add(table)
+        db.session.add(dataset)
+        db.session.add(table1)
+        db.session.add(dataset1)
+        db.session.add(database1)
+
+        db.session.commit()
+
+        return [database, table, table1, dataset, dataset1, database1]
+
+    @pytest.fixture()
+    def create_dataset(self):
+        with self.create_app().app_context():
+            models = self.insert_dataset()
+
+            yield
+
+            for m in models:
+                db.session.delete(m)
+
+            db.session.commit()
+
+    @pytest.mark.usefixtures("create_dataset")
+    def test_get_dataset_list(self):
+        """
+        Dataset API: Test get all datasets
+        """
+        self.login(username="admin")
+        uri = f"api/v1/datasets/"
+        rv = self.get_assert_metric(uri, "get_list")
+        assert rv.status_code == 200
+        response = json.loads(rv.data.decode("utf-8"))
+        assert response["count"] == 2
+        expected_columns = [
+            "changed_by",
+            "changed_by_name",
+            "changed_by_url",
+            "changed_on_delta_humanized",
+            "database",
+            "datasource_type",
+            "default_endpoint",
+            "description",
+            "extra",
+            "id",
+            "kind",
+            "owners",
+            "sql",
+        ]
+        assert sorted(list(response["result"][0].keys())) == expected_columns
+
+    @pytest.mark.usefixtures("create_dataset")
+    def test_get_dataset_list_filter_schema(self):
+        """
+        Dataset API: Test get all datasets with specific schema
+        """
+        self.login(username="admin")
+        arguments = {
+            "filters": [
+                {"col": "tables", "opr": "schema", "value": "schema1"},
+            ]
+        }
+        uri = f"api/v1/datasets/?q={prison.dumps(arguments)}"
+        rv = self.get_assert_metric(uri, "get_list")
+        response = json.loads(rv.data.decode("utf-8"))
+        assert len(response["result"]) == 1
+        assert rv.status_code == 200
+
+    @pytest.mark.usefixtures("create_dataset")
+    def test_get_dataset_list_filter_db(self):
+        """
+        Dataset API: Test get all datasets connected to specific db
+        """
+        self.login(username="admin")
+        from superset import db
+        from superset.models import core as models
+
+        database = (
+            db.session.query(models.Database)
+            .filter_by(database_name="db1")
+            .autoflush(False)
+            .first()
+        )
+
+        arguments = {
+            "filters": [
+                {"col": "tables", "opr": "db", "value": database.id},
+            ]
+        }
+        uri = f"api/v1/datasets/?q={prison.dumps(arguments)}"
+        rv = self.get_assert_metric(uri, "get_list")
+        response = json.loads(rv.data.decode("utf-8"))
+        assert len(response["result"]) == 1
+        assert rv.status_code == 200
+
+    # todo: write this test once owners pr is merged
+    # @pytest.mark.usefixtures("create_dataset")
+    # def test_get_dataset_list_filter_owners(self):
+    #     """
+    #     Dataset API: Test get all datasets with specific owners
+    #     """
+    #     self.login(username="admin")
+    #     uri = f"api/v1/datasets/"
+    #     rv = self.get_assert_metric(uri, "get_list")
+    #     assert rv.status_code == 200
+
+    def test_get_dataset_list_search(self):
+        """
+        Dataset API: Test get all datasets search
+        """
+        self.login(username="admin")
+        uri = f"api/v1/datasets/"
+        rv = self.get_assert_metric(uri, "get_list")
+        assert rv.status_code == 200