Posted to commits@superset.apache.org by mi...@apache.org on 2023/08/30 12:40:59 UTC

[superset] branch 3.0 updated (2c99366333 -> 798b493f3a)

This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a change to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git


    from 2c99366333 chore: Updates CHANGELOG.md
     new 55c57b9277 chore: remove CssTemplate and Annotation access from gamma role (#24826)
     new 0caaad7b0a fix(embedded sdk): Remove trailing slash from passed superset domain if there is one (#25020)
     new 7d5cd72e43 fix: dataset update permission out of sync (#25043)
     new 76da1b59f9 refactor(pinot): The `python_date_format` for a temporal column was not being passed to `get_timestamp_expr` (#24942)
     new 9ceba619c3 fix: Filter names overflow wrap (#25087)
     new f34e21be69 fix(assets import): Ensure old datasource ids are not referenced in imported charts (#25086)
     new 6a461260fc fix: Allow embedded guest user datasource access with dashboard context (#25081)
     new 429ff9b0f8 fix(sqllab): rendering performance regression by resultset (#25091)
     new 90e7e769ce fix(sqllab): error while removing a referenced table (#25114)
     new caa3b6d5ba fix: Date format when importing international timestamps (#25113)
     new 798b493f3a docs: Update UPDATING.md regarding potential breaking change to `ab_user.email` column (#25115)

The 11 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 UPDATING.md                                        |   1 +
 docs/docs/installation/configuring-superset.mdx    |  23 +-
 superset-embedded-sdk/package-lock.json            |   4 +-
 superset-embedded-sdk/package.json                 |   2 +-
 superset-embedded-sdk/src/index.ts                 |   4 +
 .../{SouthPane.test.jsx => SouthPane.test.tsx}     |   9 +-
 .../src/SqlLab/components/SouthPane/index.tsx      |  56 +++--
 .../SqlLab/components/SqlEditor/SqlEditor.test.jsx |  24 +-
 .../src/SqlLab/components/TableElement/index.tsx   |  11 +-
 superset-frontend/src/SqlLab/reducers/sqlLab.js    |   3 +
 .../src/SqlLab/reducers/sqlLab.test.js             |  32 +++
 .../FilterBar/FilterControls/FilterControl.tsx     |   2 +-
 .../components/nativeFilters/FilterCard/Styles.ts  |   1 +
 superset/commands/importers/v1/assets.py           |   9 +-
 superset/connectors/sqla/models.py                 | 112 +--------
 superset/db_engine_specs/pinot.py                  | 134 ++++-------
 superset/security/manager.py                       |  72 ++++--
 .../form_view/csv_to_database_view/edit.html       | 229 +++++++++---------
 superset/views/base.py                             |  28 ---
 superset/views/database/forms.py                   |   4 +
 superset/views/database/views.py                   |   2 +
 superset/viz.py                                    |   1 +
 tests/integration_tests/commands_test.py           |   3 +
 .../db_engine_specs/pinot_tests.py                 |  39 ++--
 .../fixtures/birth_names_dashboard.py              |   8 +
 .../fixtures/world_bank_dashboard.py               |   8 +
 .../security/guest_token_security_tests.py         | 258 ++++++++++++++++++++-
 tests/integration_tests/security_tests.py          |   5 +-
 tests/unit_tests/db_engine_specs/test_pinot.py     |  57 +++++
 29 files changed, 706 insertions(+), 435 deletions(-)
 rename superset-frontend/src/SqlLab/components/SouthPane/{SouthPane.test.jsx => SouthPane.test.tsx} (93%)
 mode change 100644 => 100755 tests/integration_tests/db_engine_specs/pinot_tests.py
 create mode 100644 tests/unit_tests/db_engine_specs/test_pinot.py


[superset] 03/11: fix: dataset update permission out of sync (#25043)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 7d5cd72e4360c433b6823e538fd8b6e2104d6fd0
Author: Zef Lin <ze...@preset.io>
AuthorDate: Fri Aug 25 11:34:25 2023 -0700

    fix: dataset update permission out of sync (#25043)
---
 superset/connectors/sqla/models.py | 112 ++++---------------------------------
 superset/security/manager.py       |  40 +++++++++----
 superset/views/base.py             |  28 ----------
 3 files changed, 41 insertions(+), 139 deletions(-)

diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index aeda42cf8c..d4078dbfd0 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -74,12 +74,10 @@ from superset import app, db, is_feature_enabled, security_manager
 from superset.common.db_query_status import QueryStatus
 from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
 from superset.connectors.sqla.utils import (
-    find_cached_objects_in_session,
     get_columns_description,
     get_physical_table_metadata,
     get_virtual_table_metadata,
 )
-from superset.datasets.models import Dataset as NewDataset
 from superset.db_engine_specs.base import BaseEngineSpec, TimestampExpression
 from superset.exceptions import (
     ColumnNotFoundException,
@@ -1430,44 +1428,20 @@ class SqlaTable(
 
     @staticmethod
     def before_update(
-        mapper: Mapper,  # pylint: disable=unused-argument
-        connection: Connection,  # pylint: disable=unused-argument
+        mapper: Mapper,
+        connection: Connection,
         target: SqlaTable,
     ) -> None:
         """
-        Check before update if the target table already exists.
-
-        Note this listener is called when any fields are being updated and thus it is
-        necessary to first check whether the reference table is being updated.
-
-        Note this logic is temporary, given uniqueness is handled via the dataset DAO,
-        but is necessary until both the legacy datasource editor and datasource/save
-        endpoints are deprecated.
+        Note this listener is called when any fields are being updated.
 
         :param mapper: The table mapper
         :param connection: The DB-API connection
         :param target: The mapped instance being persisted
         :raises Exception: If the target table is not unique
         """
-
-        # pylint: disable=import-outside-toplevel
-        from superset.daos.dataset import DatasetDAO
-        from superset.datasets.commands.exceptions import get_dataset_exist_error_msg
-
-        # Check whether the relevant attributes have changed.
-        state = db.inspect(target)  # pylint: disable=no-member
-
-        for attr in ["database_id", "schema", "table_name"]:
-            history = state.get_history(attr, True)
-            if history.has_changes():
-                break
-        else:
-            return None
-
-        if not DatasetDAO.validate_uniqueness(
-            target.database_id, target.schema, target.table_name, target.id
-        ):
-            raise Exception(get_dataset_exist_error_msg(target.full_name))
+        target.load_database()
+        security_manager.dataset_before_update(mapper, connection, target)
 
     @staticmethod
     def update_column(  # pylint: disable=unused-argument
@@ -1485,34 +1459,17 @@ class SqlaTable(
         # table is updated. This busts the cache key for all charts that use the table.
         session.execute(update(SqlaTable).where(SqlaTable.id == target.table.id))
 
-        # TODO: This shadow writing is deprecated
-        # if table itself has changed, shadow-writing will happen in `after_update` anyway
-        if target.table not in session.dirty:
-            dataset: NewDataset = (
-                session.query(NewDataset)
-                .filter_by(uuid=target.table.uuid)
-                .one_or_none()
-            )
-            # Update shadow dataset and columns
-            # did we find the dataset?
-            if not dataset:
-                # if dataset is not found create a new copy
-                target.table.write_shadow_dataset()
-                return
-
     @staticmethod
     def after_insert(
         mapper: Mapper,
         connection: Connection,
-        sqla_table: SqlaTable,
+        target: SqlaTable,
     ) -> None:
         """
         Update dataset permissions after insert
         """
-        security_manager.dataset_after_insert(mapper, connection, sqla_table)
-
-        # TODO: deprecated
-        sqla_table.write_shadow_dataset()
+        target.load_database()
+        security_manager.dataset_after_insert(mapper, connection, target)
 
     @staticmethod
     def after_delete(
@@ -1525,63 +1482,16 @@ class SqlaTable(
         """
         security_manager.dataset_after_delete(mapper, connection, sqla_table)
 
-    @staticmethod
-    def after_update(
-        mapper: Mapper,
-        connection: Connection,
-        sqla_table: SqlaTable,
-    ) -> None:
-        """
-        Update dataset permissions
-        """
-        # set permissions
-        security_manager.dataset_after_update(mapper, connection, sqla_table)
-
-        # TODO: the shadow writing is deprecated
-        inspector = inspect(sqla_table)
-        session = inspector.session
-
-        # double-check that ``UPDATE``s are actually pending (this method is called even
-        # for instances that have no net changes to their column-based attributes)
-        if not session.is_modified(sqla_table, include_collections=True):
-            return
-
-        # find the dataset from the known instance list first
-        # (it could be either from a previous query or newly created)
-        dataset = next(
-            find_cached_objects_in_session(
-                session, NewDataset, uuids=[sqla_table.uuid]
-            ),
-            None,
-        )
-        # if not found, pull from database
-        if not dataset:
-            dataset = (
-                session.query(NewDataset).filter_by(uuid=sqla_table.uuid).one_or_none()
-            )
-        if not dataset:
-            sqla_table.write_shadow_dataset()
-            return
-
-    def write_shadow_dataset(
-        self: SqlaTable,
-    ) -> None:
-        """
-        This method is deprecated
-        """
-        session = inspect(self).session
-        # most of the write_shadow_dataset functionality has been removed
-        # but leaving this portion in
-        # to remove later because it is adding a Database relationship to the session
-        # and there is some functionality that depends on this
+    def load_database(self: SqlaTable) -> None:
+        # The database attribute is sometimes not loaded on access; reload it here.
         if self.database_id and (
             not self.database or self.database.id != self.database_id
         ):
+            session = inspect(self).session
             self.database = session.query(Database).filter_by(id=self.database_id).one()
 
 
 sa.event.listen(SqlaTable, "before_update", SqlaTable.before_update)
-sa.event.listen(SqlaTable, "after_update", SqlaTable.after_update)
 sa.event.listen(SqlaTable, "after_insert", SqlaTable.after_insert)
 sa.event.listen(SqlaTable, "after_delete", SqlaTable.after_delete)
 sa.event.listen(SqlMetric, "after_update", SqlaTable.update_column)
diff --git a/superset/security/manager.py b/superset/security/manager.py
index 1d6ff1a2eb..b1cb15aadf 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -1323,7 +1323,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
             mapper, connection, "datasource_access", dataset_vm_name
         )
 
-    def dataset_after_update(
+    def dataset_before_update(
         self,
         mapper: Mapper,
         connection: Connection,
@@ -1343,14 +1343,20 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :param target: The changed dataset object
         :return:
         """
+        # pylint: disable=import-outside-toplevel
+        from superset.connectors.sqla.models import SqlaTable
+
         # Check if watched fields have changed
-        state = inspect(target)
-        history_database = state.get_history("database_id", True)
-        history_table_name = state.get_history("table_name", True)
-        history_schema = state.get_history("schema", True)
+        table = SqlaTable.__table__
+        current_dataset = connection.execute(
+            table.select().where(table.c.id == target.id)
+        ).one()
+        current_db_id = current_dataset.database_id
+        current_schema = current_dataset.schema
+        current_table_name = current_dataset.table_name
 
         # When database name changes
-        if history_database.has_changes() and history_database.deleted:
+        if current_db_id != target.database_id:
             new_dataset_vm_name = self.get_dataset_perm(
                 target.id, target.table_name, target.database.database_name
             )
@@ -1370,20 +1376,19 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
             )
 
         # When table name changes
-        if history_table_name.has_changes() and history_table_name.deleted:
-            old_dataset_name = history_table_name.deleted[0]
+        if current_table_name != target.table_name:
             new_dataset_vm_name = self.get_dataset_perm(
                 target.id, target.table_name, target.database.database_name
             )
             old_dataset_vm_name = self.get_dataset_perm(
-                target.id, old_dataset_name, target.database.database_name
+                target.id, current_table_name, target.database.database_name
             )
             self._update_dataset_perm(
                 mapper, connection, old_dataset_vm_name, new_dataset_vm_name, target
             )
 
         # When schema changes
-        if history_schema.has_changes() and history_schema.deleted:
+        if current_schema != target.schema:
             new_dataset_schema_name = self.get_schema_perm(
                 target.database.database_name, target.schema
             )
@@ -1414,6 +1419,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :param target: Dataset that was updated
         :return:
         """
+        logger.info("Updating schema perm, new: %s", new_schema_permission_name)
         from superset.connectors.sqla.models import (  # pylint: disable=import-outside-toplevel
             SqlaTable,
         )
@@ -1467,6 +1473,11 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         :param target:
         :return:
         """
+        logger.info(
+            "Updating dataset perm, old: %s, new: %s",
+            old_permission_name,
+            new_permission_name,
+        )
         from superset.connectors.sqla.models import (  # pylint: disable=import-outside-toplevel
             SqlaTable,
         )
@@ -1481,6 +1492,15 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         new_dataset_view_menu = self.find_view_menu(new_permission_name)
         if new_dataset_view_menu:
             return
+        old_dataset_view_menu = self.find_view_menu(old_permission_name)
+        if not old_dataset_view_menu:
+            logger.warning(
+                "Could not find previous dataset permission %s", old_permission_name
+            )
+            self._insert_pvm_on_sqla_event(
+                mapper, connection, "datasource_access", new_permission_name
+            )
+            return
         # Update VM
         connection.execute(
             view_menu_table.update()
diff --git a/superset/views/base.py b/superset/views/base.py
index ab53ff07da..a0102bf3bb 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -56,14 +56,12 @@ from superset import (
     app as superset_app,
     appbuilder,
     conf,
-    db,
     get_feature_flags,
     is_feature_enabled,
     security_manager,
 )
 from superset.commands.exceptions import CommandException, CommandInvalidError
 from superset.connectors.sqla import models
-from superset.datasets.commands.exceptions import get_dataset_exist_error_msg
 from superset.db_engine_specs import get_available_engine_specs
 from superset.db_engine_specs.gsheets import GSheetsEngineSpec
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
@@ -285,32 +283,6 @@ def handle_api_exception(
     return functools.update_wrapper(wraps, f)
 
 
-def validate_sqlatable(table: models.SqlaTable) -> None:
-    """Checks the table existence in the database."""
-    with db.session.no_autoflush:
-        table_query = db.session.query(models.SqlaTable).filter(
-            models.SqlaTable.table_name == table.table_name,
-            models.SqlaTable.schema == table.schema,
-            models.SqlaTable.database_id == table.database.id,
-        )
-        if db.session.query(table_query.exists()).scalar():
-            raise Exception(get_dataset_exist_error_msg(table.full_name))
-
-    # Fail before adding if the table can't be found
-    try:
-        table.get_sqla_table_object()
-    except Exception as ex:
-        logger.exception("Got an error in pre_add for %s", table.name)
-        raise Exception(
-            _(
-                "Table [%{table}s] could not be found, "
-                "please double check your "
-                "database connection, schema, and "
-                "table name, error: {}"
-            ).format(table.name, str(ex))
-        ) from ex
-
-
 class BaseSupersetView(BaseView):
     @staticmethod
     def json_response(obj: Any, status: int = 200) -> FlaskResponse:

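The heart of this fix: permission syncing moves from an `after_update` listener to `before_update`, and instead of SQLAlchemy attribute history it diffs the incoming object against the row currently persisted in the database. A minimal standalone sketch of that pattern (model and listener names here are illustrative, not Superset code):

```python
# Sketch of the before_update pattern adopted above: compare the incoming
# object against the persisted row, read through the same connection the
# flush uses, rather than relying on session attribute history.
import sqlalchemy as sa
from sqlalchemy import event
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class DatasetTable(Base):  # illustrative stand-in for SqlaTable
    __tablename__ = "tables"
    id = sa.Column(sa.Integer, primary_key=True)
    table_name = sa.Column(sa.String(250))


@event.listens_for(DatasetTable, "before_update")
def sync_perms(mapper, connection, target):
    table = DatasetTable.__table__
    # The UPDATE has not been emitted yet, so this still sees the old name.
    current = connection.execute(
        table.select().where(table.c.id == target.id)
    ).one()
    if current.table_name != target.table_name:
        # Both the old and new permission names can be derived reliably here.
        print(f"renamed: {current.table_name!r} -> {target.table_name!r}")
```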

[superset] 02/11: fix(embedded sdk): Remove trailing slash from passed superset domain if there is one (#25020)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 0caaad7b0a5bf6d7a48ab9ca950991d9ad467909
Author: Jack Fragassi <jf...@gmail.com>
AuthorDate: Fri Aug 25 09:24:58 2023 -0700

    fix(embedded sdk): Remove trailing slash from passed superset domain if there is one (#25020)
    
    (cherry picked from commit 74023793464c131af005addc572d9b3aa7aa0898)
---
 superset-embedded-sdk/package-lock.json | 4 ++--
 superset-embedded-sdk/package.json      | 2 +-
 superset-embedded-sdk/src/index.ts      | 4 ++++
 3 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/superset-embedded-sdk/package-lock.json b/superset-embedded-sdk/package-lock.json
index 0762791ba6..c9190d46c7 100644
--- a/superset-embedded-sdk/package-lock.json
+++ b/superset-embedded-sdk/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@superset-ui/embedded-sdk",
-  "version": "0.1.0-alpha.9",
+  "version": "0.1.0-alpha.10",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@superset-ui/embedded-sdk",
-      "version": "0.1.0-alpha.9",
+      "version": "0.1.0-alpha.10",
       "license": "Apache-2.0",
       "dependencies": {
         "@superset-ui/switchboard": "^0.18.26-0",
diff --git a/superset-embedded-sdk/package.json b/superset-embedded-sdk/package.json
index 1cab7f2c02..dfe1801ac9 100644
--- a/superset-embedded-sdk/package.json
+++ b/superset-embedded-sdk/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@superset-ui/embedded-sdk",
-  "version": "0.1.0-alpha.9",
+  "version": "0.1.0-alpha.10",
   "description": "SDK for embedding resources from Superset into your own application",
   "access": "public",
   "keywords": [
diff --git a/superset-embedded-sdk/src/index.ts b/superset-embedded-sdk/src/index.ts
index d2513422fa..a9ff0c485d 100644
--- a/superset-embedded-sdk/src/index.ts
+++ b/superset-embedded-sdk/src/index.ts
@@ -89,6 +89,10 @@ export async function embedDashboard({
 
   log('embedding');
 
+  if (supersetDomain.endsWith("/")) {
+    supersetDomain = supersetDomain.slice(0, -1);
+  }
+
   function calculateConfig() {
     let configNumber = 0
     if(dashboardUiConfig) {

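The SDK change trims a single trailing slash before the dashboard URL is composed, so `https://host/` no longer produces `https://host//embedded/...`. A backend that builds Superset URLs itself can apply the same defensive normalization; a sketch in Python (not part of the SDK, and the URL layout is illustrative):

```python
# Sketch: tolerate trailing slashes in a configured Superset domain before
# composing URLs, mirroring the SDK fix above.
def build_embedded_url(superset_domain: str, dashboard_uuid: str) -> str:
    domain = superset_domain.rstrip("/")  # strips any trailing slashes
    return f"{domain}/embedded/{dashboard_uuid}"


assert (
    build_embedded_url("https://superset.example.com/", "abc-123")
    == "https://superset.example.com/embedded/abc-123"
)
```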

[superset] 09/11: fix(sqllab): error while removing a referenced table (#25114)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 90e7e769ce59108f4255eafbbc02222acf249945
Author: JUST.in DO IT <ju...@airbnb.com>
AuthorDate: Tue Aug 29 14:38:07 2023 -0700

    fix(sqllab): error while removing a referenced table (#25114)
    
    (cherry picked from commit 29355577f148d1210c40043ef6028804469d2c30)
---
 .../src/SqlLab/components/TableElement/index.tsx   | 11 ++++++--
 superset-frontend/src/SqlLab/reducers/sqlLab.js    |  3 ++
 .../src/SqlLab/reducers/sqlLab.test.js             | 32 ++++++++++++++++++++++
 3 files changed, 44 insertions(+), 2 deletions(-)

diff --git a/superset-frontend/src/SqlLab/components/TableElement/index.tsx b/superset-frontend/src/SqlLab/components/TableElement/index.tsx
index 147422b3c0..c47d1d86d3 100644
--- a/superset-frontend/src/SqlLab/components/TableElement/index.tsx
+++ b/superset-frontend/src/SqlLab/components/TableElement/index.tsx
@@ -21,7 +21,7 @@ import { useDispatch } from 'react-redux';
 import Collapse from 'src/components/Collapse';
 import Card from 'src/components/Card';
 import ButtonGroup from 'src/components/ButtonGroup';
-import { css, t, styled } from '@superset-ui/core';
+import { css, t, styled, useTheme } from '@superset-ui/core';
 import { debounce } from 'lodash';
 
 import {
@@ -111,6 +111,7 @@ const StyledCollapsePanel = styled(Collapse.Panel)`
 
 const TableElement = ({ table, ...props }: TableElementProps) => {
   const { dbId, schema, name, expanded } = table;
+  const theme = useTheme();
   const dispatch = useDispatch();
   const {
     data: tableMetadata,
@@ -258,7 +259,13 @@ const TableElement = ({ table, ...props }: TableElementProps) => {
       );
     }
     return (
-      <ButtonGroup className="ws-el-controls">
+      <ButtonGroup
+        css={css`
+          display: flex;
+          column-gap: ${theme.gridUnit * 1.5}px;
+          margin-right: ${theme.gridUnit}px;
+        `}
+      >
         {keyLink}
         <IconTooltip
           className={
diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.js b/superset-frontend/src/SqlLab/reducers/sqlLab.js
index 9f92372fd2..2b82f42d09 100644
--- a/superset-frontend/src/SqlLab/reducers/sqlLab.js
+++ b/superset-frontend/src/SqlLab/reducers/sqlLab.js
@@ -184,6 +184,9 @@ export default function sqlLabReducer(state = {}, action) {
         if (action.query) {
           at.dataPreviewQueryId = action.query.id;
         }
+        if (existingTable.initialized) {
+          at.id = existingTable.id;
+        }
         return alterInArr(state, 'tables', existingTable, at);
       }
       // for new table, associate Id of query for data preview
diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.test.js b/superset-frontend/src/SqlLab/reducers/sqlLab.test.js
index 8827813698..40597c41b0 100644
--- a/superset-frontend/src/SqlLab/reducers/sqlLab.test.js
+++ b/superset-frontend/src/SqlLab/reducers/sqlLab.test.js
@@ -248,6 +248,38 @@ describe('sqlLabReducer', () => {
       expect(newState.tables).toHaveLength(1);
       expect(newState.tables[0].extra).toBe(true);
     });
+    it('should ignore the table ID overwrite when the existing table is already initialized', () => {
+      const action = {
+        type: actions.MERGE_TABLE,
+        table: newTable,
+      };
+      newState = sqlLabReducer(newState, action);
+      expect(newState.tables).toHaveLength(1);
+      // Merging the initialized remote id
+      const remoteId = 1;
+      const syncAction = {
+        type: actions.MERGE_TABLE,
+        table: {
+          ...newTable,
+          id: remoteId,
+          initialized: true,
+        },
+      };
+      newState = sqlLabReducer(newState, syncAction);
+      expect(newState.tables).toHaveLength(1);
+      expect(newState.tables[0].initialized).toBe(true);
+      expect(newState.tables[0].id).toBe(remoteId);
+      const overwriteAction = {
+        type: actions.MERGE_TABLE,
+        table: {
+          id: 'rnd_new_id',
+          ...newTable,
+        },
+      };
+      newState = sqlLabReducer(newState, overwriteAction);
+      expect(newState.tables).toHaveLength(1);
+      expect(newState.tables[0].id).toBe(remoteId);
+    });
     it('should expand and collapse a table', () => {
       const collapseTableAction = {
         type: actions.COLLAPSE_TABLE,


[superset] 11/11: docs: Update UPDATING.md regarding potential breaking change to `ab_user.email` column (#25115)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 798b493f3a1bf0e36c1d95a3b4b46f67baf08ee0
Author: John Bodley <45...@users.noreply.github.com>
AuthorDate: Wed Aug 30 04:42:23 2023 -0700

    docs: Update UPDATING.md regarding potential breaking change to `ab_user.email` column (#25115)
    
    Co-authored-by: Michael S. Molina <70...@users.noreply.github.com>
---
 UPDATING.md                                     |  1 +
 docs/docs/installation/configuring-superset.mdx | 23 +++++++++++------------
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/UPDATING.md b/UPDATING.md
index 19c60a19b7..d11a78b29f 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -32,6 +32,7 @@ assists people when migrating to a new version.
 
 ## 3.0.0
 
+- [25053](https://github.com/apache/superset/pull/25053): Extends the `ab_user.email` column from 64 to 320 characters which has an associated unique key constraint. This will be problematic for MySQL metadata databases which use the InnoDB storage engine with the `innodb_large_prefix` parameter disabled as the key prefix limit is 767 bytes. Enabling said parameter and ensuring that the table uses either the `DYNAMIC` or `COMPRESSED` row format should remedy the problem. See [here]( [...]
 - [24911](https://github.com/apache/superset/pull/24911): Changes the column type from `TEXT` to `MediumText` in table `logs`, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments.
 - [24939](https://github.com/apache/superset/pull/24939): Augments the foreign key constraints for the `embedded_dashboards` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard is deleted. Scheduled downtime may be advised.
 - [24938](https://github.com/apache/superset/pull/24938): Augments the foreign key constraints for the `dashboard_slices` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard or slice is deleted. Scheduled downtime may be advised.
diff --git a/docs/docs/installation/configuring-superset.mdx b/docs/docs/installation/configuring-superset.mdx
index 66022521bf..9cb3aaefac 100644
--- a/docs/docs/installation/configuring-superset.mdx
+++ b/docs/docs/installation/configuring-superset.mdx
@@ -13,6 +13,7 @@ To configure your application, you need to create a file `superset_config.py` an
 `PYTHONPATH`. If your application was installed using docker-compose an alternative configuration is required. See [https://github.com/apache/superset/tree/master/docker#readme](https://github.com/apache/superset/tree/master/docker#readme) for details.
 
 The following is an example of just a few of the parameters you can set in your `superset_config.py` file:
+
 ```
 # Superset specific config
 ROW_LIMIT = 5000
@@ -72,7 +73,7 @@ WTF_CSRF_EXEMPT_LIST = [‘’]
 
 #### Adding an initial SECRET_KEY
 
-Superset requires a user-specified SECRET_KEY to start up.  This requirement was [added in version 2.1.0 to force secure configurations](https://preset.io/blog/superset-security-update-default-secret_key-vulnerability/).  Add a strong SECRET_KEY to your `superset_config.py` file like:
+Superset requires a user-specified SECRET_KEY to start up. This requirement was [added in version 2.1.0 to force secure configurations](https://preset.io/blog/superset-security-update-default-secret_key-vulnerability/). Add a strong SECRET_KEY to your `superset_config.py` file like:
 
 ```python
 SECRET_KEY = 'YOUR_OWN_RANDOM_GENERATED_SECRET_KEY'
@@ -83,7 +84,7 @@ You can generate a strong secure key with `openssl rand -base64 42`.
 #### Rotating to a newer SECRET_KEY
 
 If you wish to change your existing SECRET_KEY, add the existing SECRET_KEY to your `superset_config.py` file as
-`PREVIOUS_SECRET_KEY = `and provide your new key as `SECRET_KEY =`.  You can find your current SECRET_KEY with these
+`PREVIOUS_SECRET_KEY = ` and provide your new key as `SECRET_KEY =`. You can find your current SECRET_KEY with these
 commands - if running Superset with Docker, execute from within the Superset application container:
 
 ```python
@@ -103,23 +104,21 @@ database engine on a separate host or container.
 
 Superset supports the following database engines/versions:
 
-| Database Engine                                           | Supported Versions                |
-| --------------------------------------------------------- | --------------------------------- |
-| [PostgreSQL](https://www.postgresql.org/)                 | 10.X, 11.X, 12.X, 13.X, 14.X      |
-| [MySQL](https://www.mysql.com/)                           | 5.X                               |
-
+| Database Engine                           | Supported Versions                 |
+| ----------------------------------------- | ---------------------------------- |
+| [PostgreSQL](https://www.postgresql.org/) | 10.X, 11.X, 12.X, 13.X, 14.X, 15.X |
+| [MySQL](https://www.mysql.com/)           | 5.7, 8.X                           |
 
 Use the following database drivers and connection strings:
 
-| Database                                  | PyPI package                      | Connection String                                                        |
-| ----------------------------------------- | --------------------------------- | ------------------------------------------------------------------------ |
-| [PostgreSQL](https://www.postgresql.org/) | `pip install psycopg2`            | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>`   |
-| [MySQL](https://www.mysql.com/)           | `pip install mysqlclient`         | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>`        |
+| Database                                  | PyPI package              | Connection String                                                      |
+| ----------------------------------------- | ------------------------- | ---------------------------------------------------------------------- |
+| [PostgreSQL](https://www.postgresql.org/) | `pip install psycopg2`    | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
+| [MySQL](https://www.mysql.com/)           | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>`      |
 
 To configure Superset metastore set `SQLALCHEMY_DATABASE_URI` config key on `superset_config`
 to the appropriate connection string.
 
-
 ### Running on a WSGI HTTP Server
 
 While you can run Superset on NGINX or Apache, we recommend using Gunicorn in async mode. This

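Tying the tables above together: pointing the metastore at one of the supported engines is a single key in `superset_config.py`. A minimal sketch with placeholder credentials:

```python
# superset_config.py -- metastore configuration sketch (placeholder values).
# Driver per the table above: pip install psycopg2
SECRET_KEY = "YOUR_OWN_RANDOM_GENERATED_SECRET_KEY"  # required since 2.1.0

SQLALCHEMY_DATABASE_URI = (
    "postgresql://superset_user:superset_pass@db.example.com/superset_meta"
)
```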

[superset] 06/11: fix(assets import): Ensure old datasource ids are not referenced in imported charts (#25086)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit f34e21be698479b96314ccbd945cff7d2b817918
Author: Jack Fragassi <jf...@gmail.com>
AuthorDate: Mon Aug 28 09:47:19 2023 -0700

    fix(assets import): Ensure old datasource ids are not referenced in imported charts (#25086)
    
    (cherry picked from commit b240b795b5bae4e9f7bd6b5e4ff73e771c76d8dd)
---
 superset/commands/importers/v1/assets.py | 9 ++++++++-
 tests/integration_tests/commands_test.py | 3 +++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/superset/commands/importers/v1/assets.py b/superset/commands/importers/v1/assets.py
index 1ab2e486cf..f0720d70b1 100644
--- a/superset/commands/importers/v1/assets.py
+++ b/superset/commands/importers/v1/assets.py
@@ -79,6 +79,7 @@ class ImportAssetsCommand(BaseCommand):
         )
         self._configs: dict[str, Any] = {}
 
+    # pylint: disable=too-many-locals
     @staticmethod
     def _import(session: Session, configs: dict[str, Any]) -> None:
         # import databases first
@@ -110,7 +111,13 @@ class ImportAssetsCommand(BaseCommand):
         chart_ids: dict[str, int] = {}
         for file_name, config in configs.items():
             if file_name.startswith("charts/"):
-                config.update(dataset_info[config["dataset_uuid"]])
+                dataset_dict = dataset_info[config["dataset_uuid"]]
+                config.update(dataset_dict)
+                # pylint: disable=line-too-long
+                dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
+                config["params"].update({"datasource": dataset_uid})
+                if "query_context" in config:
+                    del config["query_context"]
                 chart = import_chart(session, config, overwrite=True)
                 chart_ids[str(chart.uuid)] = chart.id
 
diff --git a/tests/integration_tests/commands_test.py b/tests/integration_tests/commands_test.py
index 86ebdc0951..6512a141be 100644
--- a/tests/integration_tests/commands_test.py
+++ b/tests/integration_tests/commands_test.py
@@ -141,6 +141,9 @@ class TestImportAssetsCommand(SupersetTestCase):
         dataset = chart.table
         assert str(dataset.uuid) == dataset_config["uuid"]
 
+        assert chart.query_context is None
+        assert json.loads(chart.params)["datasource"] == dataset.uid
+
         database = dataset.database
         assert str(database.uuid) == database_config["uuid"]
 
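The fix rewrites each imported chart's `params.datasource` to the dataset id assigned on import, using the `<datasource_id>__<datasource_type>` uid convention, and drops the stale `query_context` so it gets regenerated. The rewrite in isolation (the `config` shape here is a reduced illustration):

```python
# Reduced illustration of the per-chart rewrite performed in _import above.
config = {
    "dataset_uuid": "a1b2",
    "params": {"datasource": "17__table"},  # id from the exporting system
    "query_context": "{...}",  # stale as well; dropped so it is regenerated
}
dataset_info = {"a1b2": {"datasource_id": 42, "datasource_type": "table"}}

dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict)
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
config.pop("query_context", None)

assert config["params"]["datasource"] == "42__table"
```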


[superset] 04/11: refactor(pinot): The `python_date_format` for a temporal column was not being passed to `get_timestamp_expr` (#24942)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 76da1b59f9341190e5b36b6552b7b319747a9c56
Author: Erich <13...@users.noreply.github.com>
AuthorDate: Sun Aug 27 12:46:39 2023 -0400

    refactor(pinot): The `python_date_format` for a temporal column was not being passed to `get_timestamp_expr` (#24942)
    
    (cherry picked from commit c2a21d2da09dd8e2b8c9c811092007c9fa3ea564)
---
 superset/db_engine_specs/pinot.py                  | 134 +++++++--------------
 .../db_engine_specs/pinot_tests.py                 |  39 +++---
 tests/unit_tests/db_engine_specs/test_pinot.py     |  57 +++++++++
 3 files changed, 125 insertions(+), 105 deletions(-)

diff --git a/superset/db_engine_specs/pinot.py b/superset/db_engine_specs/pinot.py
index a0662366d1..2cafd5ecb0 100644
--- a/superset/db_engine_specs/pinot.py
+++ b/superset/db_engine_specs/pinot.py
@@ -14,15 +14,15 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import Optional
-
-from sqlalchemy.sql.expression import ColumnClause
+from sqlalchemy import types
+from sqlalchemy.engine.interfaces import Dialect
+from sqlalchemy.types import TypeEngine
 
 from superset.constants import TimeGrain
-from superset.db_engine_specs.base import BaseEngineSpec, TimestampExpression
+from superset.db_engine_specs.base import BaseEngineSpec
 
 
-class PinotEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
+class PinotEngineSpec(BaseEngineSpec):
     engine = "pinot"
     engine_name = "Apache Pinot"
     allows_subqueries = False
@@ -30,93 +30,51 @@ class PinotEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
     allows_alias_in_select = False
     allows_alias_in_orderby = False
 
-    # Pinot does its own conversion below
+    # https://docs.pinot.apache.org/users/user-guide-query/supported-transformations#datetime-functions
     _time_grain_expressions = {
-        TimeGrain.SECOND: "1:SECONDS",
-        TimeGrain.MINUTE: "1:MINUTES",
-        TimeGrain.FIVE_MINUTES: "5:MINUTES",
-        TimeGrain.TEN_MINUTES: "10:MINUTES",
-        TimeGrain.FIFTEEN_MINUTES: "15:MINUTES",
-        TimeGrain.THIRTY_MINUTES: "30:MINUTES",
-        TimeGrain.HOUR: "1:HOURS",
-        TimeGrain.DAY: "1:DAYS",
-        TimeGrain.WEEK: "week",
-        TimeGrain.MONTH: "month",
-        TimeGrain.QUARTER: "quarter",
-        TimeGrain.YEAR: "year",
-    }
-
-    _python_to_java_time_patterns: dict[str, str] = {
-        "%Y": "yyyy",
-        "%m": "MM",
-        "%d": "dd",
-        "%H": "HH",
-        "%M": "mm",
-        "%S": "ss",
-    }
-
-    _use_date_trunc_function: dict[str, bool] = {
-        TimeGrain.SECOND: False,
-        TimeGrain.MINUTE: False,
-        TimeGrain.FIVE_MINUTES: False,
-        TimeGrain.TEN_MINUTES: False,
-        TimeGrain.FIFTEEN_MINUTES: False,
-        TimeGrain.THIRTY_MINUTES: False,
-        TimeGrain.HOUR: False,
-        TimeGrain.DAY: False,
-        TimeGrain.WEEK: True,
-        TimeGrain.MONTH: True,
-        TimeGrain.QUARTER: True,
-        TimeGrain.YEAR: True,
+        None: "{col}",
+        TimeGrain.SECOND: "CAST(DATE_TRUNC('second', "
+        + "CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+        TimeGrain.MINUTE: "CAST(DATE_TRUNC('minute', "
+        + "CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+        TimeGrain.FIVE_MINUTES: "CAST(ROUND(DATE_TRUNC('minute', "
+        + "CAST({col} AS TIMESTAMP)), 300000) AS TIMESTAMP)",
+        TimeGrain.TEN_MINUTES: "CAST(ROUND(DATE_TRUNC('minute', "
+        + "CAST({col} AS TIMESTAMP)), 600000) AS TIMESTAMP)",
+        TimeGrain.FIFTEEN_MINUTES: "CAST(ROUND(DATE_TRUNC('minute', "
+        + "CAST({col} AS TIMESTAMP)), 900000) AS TIMESTAMP)",
+        TimeGrain.THIRTY_MINUTES: "CAST(ROUND(DATE_TRUNC('minute', "
+        + "CAST({col} AS TIMESTAMP)), 1800000) AS TIMESTAMP)",
+        TimeGrain.HOUR: "CAST(DATE_TRUNC('hour', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+        TimeGrain.DAY: "CAST(DATE_TRUNC('day', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+        TimeGrain.WEEK: "CAST(DATE_TRUNC('week', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+        TimeGrain.MONTH: "CAST(DATE_TRUNC('month', "
+        + "CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+        TimeGrain.QUARTER: "CAST(DATE_TRUNC('quarter', "
+        + "CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+        TimeGrain.YEAR: "CAST(DATE_TRUNC('year', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
     }
 
     @classmethod
-    def get_timestamp_expr(
-        cls,
-        col: ColumnClause,
-        pdf: Optional[str],
-        time_grain: Optional[str],
-    ) -> TimestampExpression:
-        if not pdf:
-            raise NotImplementedError(f"Empty date format for '{col}'")
-        is_epoch = pdf in ("epoch_s", "epoch_ms")
+    def epoch_to_dttm(cls) -> str:
+        return (
+            "DATETIMECONVERT({col}, '1:SECONDS:EPOCH', '1:SECONDS:EPOCH', '1:SECONDS')"
+        )
 
-        # The DATETIMECONVERT pinot udf is documented at
-        # Per https://github.com/apache/incubator-pinot/wiki/dateTimeConvert-UDF
-        # We are not really converting any time units, just bucketing them.
-        tf = ""
-        java_date_format = ""
-        if not is_epoch:
-            java_date_format = pdf
-            for (
-                python_pattern,
-                java_pattern,
-            ) in cls._python_to_java_time_patterns.items():
-                java_date_format = java_date_format.replace(
-                    python_pattern, java_pattern
-                )
-            tf = f"1:SECONDS:SIMPLE_DATE_FORMAT:{java_date_format}"
-        else:
-            seconds_or_ms = "MILLISECONDS" if pdf == "epoch_ms" else "SECONDS"
-            tf = f"1:{seconds_or_ms}:EPOCH"
-        if time_grain:
-            granularity = cls.get_time_grain_expressions().get(time_grain)
-            if not granularity:
-                raise NotImplementedError(f"No pinot grain spec for '{time_grain}'")
-        else:
-            return TimestampExpression("{{col}}", col)
+    @classmethod
+    def epoch_ms_to_dttm(cls) -> str:
+        return (
+            "DATETIMECONVERT({col}, '1:MILLISECONDS:EPOCH', "
+            + "'1:MILLISECONDS:EPOCH', '1:MILLISECONDS')"
+        )
 
-        # In pinot the output is a string since there is no timestamp column like pg
-        if cls._use_date_trunc_function.get(time_grain):
-            if is_epoch:
-                time_expr = f"DATETRUNC('{granularity}', {{col}}, '{seconds_or_ms}')"
-            else:
-                time_expr = (
-                    f"ToDateTime(DATETRUNC('{granularity}', "
-                    + f"FromDateTime({{col}}, '{java_date_format}'), "
-                    + f"'MILLISECONDS'), '{java_date_format}')"
-                )
-        else:
-            time_expr = f"DATETIMECONVERT({{col}}, '{tf}', '{tf}', '{granularity}')"
+    @classmethod
+    def column_datatype_to_string(
+        cls, sqla_column_type: TypeEngine, dialect: Dialect
+    ) -> str:
+        # The Pinot driver infers TIMESTAMP columns as LONG, so apply a quick fix.
+        # When the Pinot driver fixes this bug, this method can be removed.
+        if isinstance(sqla_column_type, types.TIMESTAMP):
+            return sqla_column_type.compile().upper()
 
-        return TimestampExpression(time_expr, col)
+        return super().column_datatype_to_string(sqla_column_type, dialect)
diff --git a/tests/integration_tests/db_engine_specs/pinot_tests.py b/tests/integration_tests/db_engine_specs/pinot_tests.py
old mode 100644
new mode 100755
index c6e364a8ea..3998d20940
--- a/tests/integration_tests/db_engine_specs/pinot_tests.py
+++ b/tests/integration_tests/db_engine_specs/pinot_tests.py
@@ -27,61 +27,66 @@ class TestPinotDbEngineSpec(TestDbEngineSpec):
         col = column("tstamp")
         expr = PinotEngineSpec.get_timestamp_expr(col, "epoch_s", "P1D")
         result = str(expr.compile())
+        expected = (
+            "CAST(DATE_TRUNC('day', CAST("
+            + "DATETIMECONVERT(tstamp, '1:SECONDS:EPOCH', "
+            + "'1:SECONDS:EPOCH', '1:SECONDS') AS TIMESTAMP)) AS TIMESTAMP)"
+        )
         self.assertEqual(
             result,
-            "DATETIMECONVERT(tstamp, '1:SECONDS:EPOCH', '1:SECONDS:EPOCH', '1:DAYS')",
+            expected,
         )
 
     def test_pinot_time_expression_simple_date_format_1d_grain(self):
         col = column("tstamp")
         expr = PinotEngineSpec.get_timestamp_expr(col, "%Y-%m-%d %H:%M:%S", "P1D")
         result = str(expr.compile())
+        expected = "CAST(DATE_TRUNC('day', CAST(tstamp AS TIMESTAMP)) AS TIMESTAMP)"
         self.assertEqual(
             result,
-            (
-                "DATETIMECONVERT(tstamp, "
-                + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', "
-                + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', '1:DAYS')"
-            ),
+            expected,
         )
 
     def test_pinot_time_expression_simple_date_format_10m_grain(self):
         col = column("tstamp")
         expr = PinotEngineSpec.get_timestamp_expr(col, "%Y-%m-%d %H:%M:%S", "PT10M")
         result = str(expr.compile())
+        expected = (
+            "CAST(ROUND(DATE_TRUNC('minute', CAST(tstamp AS "
+            + "TIMESTAMP)), 600000) AS TIMESTAMP)"
+        )
         self.assertEqual(
             result,
-            (
-                "DATETIMECONVERT(tstamp, "
-                + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', "
-                + "'1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss', '10:MINUTES')"
-            ),
+            expected,
         )
 
     def test_pinot_time_expression_simple_date_format_1w_grain(self):
         col = column("tstamp")
         expr = PinotEngineSpec.get_timestamp_expr(col, "%Y-%m-%d %H:%M:%S", "P1W")
         result = str(expr.compile())
+        expected = "CAST(DATE_TRUNC('week', CAST(tstamp AS TIMESTAMP)) AS TIMESTAMP)"
         self.assertEqual(
             result,
-            (
-                "ToDateTime(DATETRUNC('week', FromDateTime(tstamp, "
-                + "'yyyy-MM-dd HH:mm:ss'), 'MILLISECONDS'), 'yyyy-MM-dd HH:mm:ss')"
-            ),
+            expected,
         )
 
     def test_pinot_time_expression_sec_one_1m_grain(self):
         col = column("tstamp")
         expr = PinotEngineSpec.get_timestamp_expr(col, "epoch_s", "P1M")
         result = str(expr.compile())
+        expected = (
+            "CAST(DATE_TRUNC('month', CAST("
+            + "DATETIMECONVERT(tstamp, '1:SECONDS:EPOCH', "
+            + "'1:SECONDS:EPOCH', '1:SECONDS') AS TIMESTAMP)) AS TIMESTAMP)"
+        )
         self.assertEqual(
             result,
-            "DATETRUNC('month', tstamp, 'SECONDS')",
+            expected,
         )
 
     def test_invalid_get_time_expression_arguments(self):
         with self.assertRaises(NotImplementedError):
-            PinotEngineSpec.get_timestamp_expr(column("tstamp"), None, "P1M")
+            PinotEngineSpec.get_timestamp_expr(column("tstamp"), None, "P0.25Y")
 
         with self.assertRaises(NotImplementedError):
             PinotEngineSpec.get_timestamp_expr(
diff --git a/tests/unit_tests/db_engine_specs/test_pinot.py b/tests/unit_tests/db_engine_specs/test_pinot.py
new file mode 100644
index 0000000000..a1648f5f60
--- /dev/null
+++ b/tests/unit_tests/db_engine_specs/test_pinot.py
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from unittest import mock
+
+import pytest
+from sqlalchemy import column
+
+
+@pytest.mark.parametrize(
+    "time_grain,expected_result",
+    [
+        ("PT1S", "CAST(DATE_TRUNC('second', CAST(col AS TIMESTAMP)) AS TIMESTAMP)"),
+        (
+            "PT5M",
+            "CAST(ROUND(DATE_TRUNC('minute', CAST(col AS TIMESTAMP)), 300000) AS TIMESTAMP)",
+        ),
+        ("P1W", "CAST(DATE_TRUNC('week', CAST(col AS TIMESTAMP)) AS TIMESTAMP)"),
+        ("P1M", "CAST(DATE_TRUNC('month', CAST(col AS TIMESTAMP)) AS TIMESTAMP)"),
+        ("P3M", "CAST(DATE_TRUNC('quarter', CAST(col AS TIMESTAMP)) AS TIMESTAMP)"),
+        ("P1Y", "CAST(DATE_TRUNC('year', CAST(col AS TIMESTAMP)) AS TIMESTAMP)"),
+    ],
+)
+def test_timegrain_expressions(time_grain: str, expected_result: str) -> None:
+    """
+    DB Eng Specs (pinot): Test time grain expressions
+    """
+    from superset.db_engine_specs.pinot import PinotEngineSpec as spec
+
+    actual = str(
+        spec.get_timestamp_expr(col=column("col"), pdf=None, time_grain=time_grain)
+    )
+    assert actual == expected_result
+
+
+def test_extras_without_ssl() -> None:
+    from superset.db_engine_specs.pinot import PinotEngineSpec as spec
+    from tests.integration_tests.fixtures.database import default_db_extra
+
+    db = mock.Mock()
+    db.extra = default_db_extra
+    db.server_cert = None
+    extras = spec.get_extra_params(db)
+    assert "connect_args" not in extras["engine_params"]

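After the refactor, Pinot no longer overrides `get_timestamp_expr`; the `{col}` templates in `_time_grain_expressions` are rendered by the generic `BaseEngineSpec` machinery. Exercising it the same way the new unit tests do:

```python
# Render a Pinot time-grain expression via the generic base-class path,
# exactly as the unit tests above do.
from sqlalchemy import column

from superset.db_engine_specs.pinot import PinotEngineSpec

expr = PinotEngineSpec.get_timestamp_expr(
    col=column("tstamp"), pdf=None, time_grain="P1W"
)
print(str(expr))
# CAST(DATE_TRUNC('week', CAST(tstamp AS TIMESTAMP)) AS TIMESTAMP)
```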

[superset] 01/11: chore: remove CssTemplate and Annotation access from gamma role (#24826)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 55c57b92770fdcafaefbd9b1d4589019d10703b2
Author: Lily Kuang <li...@preset.io>
AuthorDate: Thu Aug 24 16:39:56 2023 -0700

    chore: remove CssTemplate and Annotation access from gamma role (#24826)
    
    (cherry picked from commit 6ac906f38807e0d0fa044c92e4d0984497bf140f)
---
 superset/security/manager.py              | 22 +++++++++++-----------
 tests/integration_tests/security_tests.py |  3 ---
 2 files changed, 11 insertions(+), 14 deletions(-)

diff --git a/superset/security/manager.py b/superset/security/manager.py
index 028fd8762f..1d6ff1a2eb 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -156,8 +156,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     }
 
     GAMMA_READ_ONLY_MODEL_VIEWS = {
-        "Annotation",
-        "CssTemplate",
         "Dataset",
         "Datasource",
     } | READ_ONLY_MODEL_VIEWS
@@ -180,19 +178,21 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     } | USER_MODEL_VIEWS
 
     ALPHA_ONLY_VIEW_MENUS = {
-        "Manage",
-        "CSS Templates",
-        "Annotation Layers",
-        "Queries",
-        "Import dashboards",
-        "Upload a CSV",
-        "ReportSchedule",
         "Alerts & Report",
-        "TableSchemaView",
-        "CsvToDatabaseView",
+        "Annotation Layers",
+        "Annotation",
+        "CSS Templates",
         "ColumnarToDatabaseView",
+        "CssTemplate",
+        "CsvToDatabaseView",
         "ExcelToDatabaseView",
+        "Import dashboards",
         "ImportExportRestApi",
+        "Manage",
+        "Queries",
+        "ReportSchedule",
+        "TableSchemaView",
+        "Upload a CSV",
     }
 
     ADMIN_ONLY_PERMISSIONS = {
diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py
index f741ec4315..90be0edd17 100644
--- a/tests/integration_tests/security_tests.py
+++ b/tests/integration_tests/security_tests.py
@@ -1346,7 +1346,6 @@ class TestRolePermission(SupersetTestCase):
         self.assert_cannot_menu("Alerts & Report", perm_set)
 
     def assert_can_gamma(self, perm_set):
-        self.assert_can_read("CssTemplate", perm_set)
         self.assert_can_read("Dataset", perm_set)
 
         # make sure that user can create slices and dashboards
@@ -1514,8 +1513,6 @@ class TestRolePermission(SupersetTestCase):
         # make sure that user can create slices and dashboards
         self.assert_can_all("Dashboard", gamma_perm_set)
         self.assert_can_read("Dataset", gamma_perm_set)
-        self.assert_can_read("Annotation", gamma_perm_set)
-        self.assert_can_read("CssTemplate", gamma_perm_set)
 
         # make sure that user can create slices and dashboards
         self.assert_can_all("Chart", gamma_perm_set)

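To confirm the effect of this change on a running deployment, the Gamma role's permission/view pairs can be inspected through the security manager. A sketch, assuming a Flask app context (e.g. `superset shell`):

```python
# Sketch: verify Gamma no longer carries read access to CssTemplate
# or Annotation. Assumes a Flask app context.
from superset import security_manager

gamma = security_manager.find_role("Gamma")
perms = {(pvm.permission.name, pvm.view_menu.name) for pvm in gamma.permissions}
assert ("can_read", "CssTemplate") not in perms
assert ("can_read", "Annotation") not in perms
```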

[superset] 10/11: fix: Date format when importing international timestamps (#25113)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit caa3b6d5ba8e421470d191963b5952b94d64ac35
Author: Michael S. Molina <70...@users.noreply.github.com>
AuthorDate: Wed Aug 30 08:45:01 2023 -0300

    fix: Date format when importing international timestamps (#25113)
    
    (cherry picked from commit 00550d7c02fd47e69700c846c7aeb50585ac2637)
---
 .../form_view/csv_to_database_view/edit.html       | 229 ++++++++++-----------
 superset/views/database/forms.py                   |   4 +
 superset/views/database/views.py                   |   2 +
 3 files changed, 120 insertions(+), 115 deletions(-)

diff --git a/superset/templates/superset/form_view/csv_to_database_view/edit.html b/superset/templates/superset/form_view/csv_to_database_view/edit.html
index 9bbda3366f..0ea4b7b0e5 100644
--- a/superset/templates/superset/form_view/csv_to_database_view/edit.html
+++ b/superset/templates/superset/form_view/csv_to_database_view/edit.html
@@ -1,142 +1,141 @@
-{#
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
-#}
-{% extends "appbuilder/base.html" %}
-{% import 'appbuilder/general/lib.html' as lib %}
-{% import "superset/macros.html" as macros %}
-{% set begin_sep_label = '<td class="col-sm-2" style="border-left: 0; border-top: 0;">' %}
-  {% set end_sep_label = '</td>' %}
-{% set begin_sep_field = '<td style="border-right: 0; border-top: 0;">' %}
-  {% set end_sep_field = '</td>' %}
-{% import 'superset/form_view/database_schemas_selector.html' as schemas_selector %}
-{% import 'superset/form_view/csv_scripts.html' as csv_scripts %}
-{% import 'superset/form_view/csv_macros.html' as csv_macros %}
-{% block content %}
-{{ lib.panel_begin(title, "edit") }}
+{# Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with this work
+for additional information regarding copyright ownership. The ASF licenses this
+file to you under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by
+applicable law or agreed to in writing, software distributed under the License
+is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied. See the License for the specific language
+governing permissions and limitations under the License. #} {% extends
+"appbuilder/base.html" %} {% import 'appbuilder/general/lib.html' as lib %} {%
+import "superset/macros.html" as macros %} {% set begin_sep_label = '
+<td class="col-sm-2" style="border-left: 0; border-top: 0">
+  ' %} {% set end_sep_label = '
+</td>
+' %} {% set begin_sep_field = '
+<td style="border-right: 0; border-top: 0">' %} {% set end_sep_field = '</td>
+' %} {% import 'superset/form_view/database_schemas_selector.html' as
+schemas_selector %} {% import 'superset/form_view/csv_scripts.html' as
+csv_scripts %} {% import 'superset/form_view/csv_macros.html' as csv_macros %}
+{% block content %} {{ lib.panel_begin(title, "edit") }}
 <div id="Home" class="tab-pane active">
   <form id="model_form" action="" method="post" enctype="multipart/form-data">
     {{form.hidden_tag()}}
     <div class="form-group">
-      <div class="col-md-12" style="padding: 0;">
+      <div class="col-md-12" style="padding: 0">
         <table class="table table-bordered">
           <tbody>
             <tr>
-              {{ lib.render_field(form.csv_file, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field) }}
+              {{ lib.render_field(form.csv_file, begin_sep_label, end_sep_label,
+              begin_sep_field, end_sep_field) }}
             </tr>
             <tr>
-              {{ lib.render_field(form.table_name, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field) }}
+              {{ lib.render_field(form.table_name, begin_sep_label,
+              end_sep_label, begin_sep_field, end_sep_field) }}
             </tr>
             <tr>
-              {{ lib.render_field(form.database, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field) }}
+              {{ lib.render_field(form.database, begin_sep_label, end_sep_label,
+              begin_sep_field, end_sep_field) }}
             </tr>
             <tr>
-              {{ lib.render_field(form.schema, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field) }}
+              {{ lib.render_field(form.schema, begin_sep_label, end_sep_label,
+              begin_sep_field, end_sep_field) }}
             </tr>
             <tr>
-              {{ csv_macros.render_delimiter_field(form.delimiter, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field) }}
+              {{ csv_macros.render_delimiter_field(form.delimiter,
+              begin_sep_label, end_sep_label, begin_sep_field, end_sep_field) }}
             </tr>
           </tbody>
         </table>
       </div>
     </div>
-    {% call csv_macros.render_collapsable_form_group("accordion1", "File Settings") %}
-      <tr>
-        {{ lib.render_field(form.if_exists, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.skip_initial_space, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.skip_blank_lines, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.parse_dates, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.infer_datetime_format, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.decimal, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.null_values, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-    {% endcall %}
-    {% call csv_macros.render_collapsable_form_group("accordion2", "Columns") %}
-      <tr>
-        {{ lib.render_field(form.index_col, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.dataframe_index, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.index_label, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.use_cols, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.overwrite_duplicate, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.dtype, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
-    {% endcall %}
-    {% call csv_macros.render_collapsable_form_group("accordion3", "Rows") %}
-      <tr>
-        {{ lib.render_field(form.header, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field)
-        }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.nrows, begin_sep_label, end_sep_label, begin_sep_field, end_sep_field)
-        }}
-      </tr>
-      <tr>
-        {{ lib.render_field(form.skiprows, begin_sep_label, end_sep_label, begin_sep_field,
-        end_sep_field) }}
-      </tr>
+    {% call csv_macros.render_collapsable_form_group("accordion1", "File
+    Settings") %}
+    <tr>
+      {{ lib.render_field(form.if_exists, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.skip_initial_space, begin_sep_label,
+      end_sep_label, begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.skip_blank_lines, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.parse_dates, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.infer_datetime_format, begin_sep_label,
+      end_sep_label, begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.day_first, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.decimal, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.null_values, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    {% endcall %} {% call csv_macros.render_collapsable_form_group("accordion2",
+    "Columns") %}
+    <tr>
+      {{ lib.render_field(form.index_col, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.dataframe_index, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.index_label, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.use_cols, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.overwrite_duplicate, begin_sep_label,
+      end_sep_label, begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.dtype, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    {% endcall %} {% call csv_macros.render_collapsable_form_group("accordion3",
+    "Rows") %}
+    <tr>
+      {{ lib.render_field(form.header, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.nrows, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
+    <tr>
+      {{ lib.render_field(form.skiprows, begin_sep_label, end_sep_label,
+      begin_sep_field, end_sep_field) }}
+    </tr>
     {% endcall %}
     <div class="form-group">
-      <div class="col-xs-12" style="padding: 0;">
+      <div class="col-xs-12" style="padding: 0">
         {{ lib.render_form_controls() }}
       </div>
     </div>
   </form>
 </div>
-{% endblock %}
-{% block add_tail_js %}
-<script src="{{url_for('appbuilder.static',filename='js/ab_keep_tab.js')}}" nonce="{{ macros.get_nonce() }}"></script>
-{% endblock %}
-{% block tail_js %}
-  {{ super() }}
-  {{ schemas_selector }}
-  {{ csv_scripts }}
-{% endblock %}
+{% endblock %} {% block add_tail_js %}
+<script
+  src="{{url_for('appbuilder.static',filename='js/ab_keep_tab.js')}}"
+  nonce="{{ macros.get_nonce() }}"
+></script>
+{% endblock %} {% block tail_js %} {{ super() }} {{ schemas_selector }} {{
+csv_scripts }} {% endblock %}
diff --git a/superset/views/database/forms.py b/superset/views/database/forms.py
index b906e5e70b..a16114c172 100644
--- a/superset/views/database/forms.py
+++ b/superset/views/database/forms.py
@@ -199,6 +199,10 @@ class CsvToDatabaseForm(UploadToDatabaseForm):
         _("Interpret Datetime Format Automatically"),
         description=_("Interpret the datetime format automatically"),
     )
+    day_first = BooleanField(
+        _("Day First"),
+        description=_("DD/MM format dates, international and European format"),
+    )
     decimal = StringField(
         _("Decimal Character"),
         default=".",
diff --git a/superset/views/database/views.py b/superset/views/database/views.py
index 1c7ff25942..296228cd54 100644
--- a/superset/views/database/views.py
+++ b/superset/views/database/views.py
@@ -168,6 +168,7 @@ class CsvToDatabaseView(CustomFormView):
         form.skip_initial_space.data = False
         form.skip_blank_lines.data = True
         form.infer_datetime_format.data = True
+        form.day_first.data = False
         form.decimal.data = "."
         form.if_exists.data = "fail"
 
@@ -199,6 +200,7 @@ class CsvToDatabaseView(CustomFormView):
                     header=form.header.data if form.header.data else 0,
                     index_col=form.index_col.data,
                     infer_datetime_format=form.infer_datetime_format.data,
+                    dayfirst=form.day_first.data,
                     iterator=True,
                     keep_default_na=not form.null_values.data,
                     mangle_dupe_cols=form.overwrite_duplicate.data,


[superset] 05/11: fix: Filter names overflow wrap (#25087)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 9ceba619c3f2dddd37ea14f5dc3d20a77942d37c
Author: Michael S. Molina <70...@users.noreply.github.com>
AuthorDate: Mon Aug 28 11:28:16 2023 -0300

    fix: Filter names overflow wrap (#25087)
    
    (cherry picked from commit b5bac6c87600ccca301d40ae97b67d4366cdf84c)
---
 .../components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx | 2 +-
 .../src/dashboard/components/nativeFilters/FilterCard/Styles.ts         | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx
index c826d5dbff..515fed1907 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/FilterControl.tsx
@@ -43,7 +43,7 @@ const VerticalFilterControlTitle = styled.h4`
   font-size: ${({ theme }) => theme.typography.sizes.s}px;
   color: ${({ theme }) => theme.colors.grayscale.dark1};
   margin: 0;
-  overflow-wrap: break-word;
+  overflow-wrap: anywhere;
 `;
 
 const HorizontalFilterControlTitle = styled(VerticalFilterControlTitle)`
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/Styles.ts b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/Styles.ts
index 8090201f1c..7ccd461cf9 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/Styles.ts
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/Styles.ts
@@ -96,4 +96,5 @@ export const TooltipTrigger = styled.div`
 export const InternalRow = styled.div`
   display: flex;
   align-items: center;
+  overflow: hidden;
 `;


[superset] 07/11: fix: Allow embedded guest user datasource access with dashboard context (#25081)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 6a461260fc45be524148f628b19a11eb274ef9d0
Author: Jack Fragassi <jf...@gmail.com>
AuthorDate: Mon Aug 28 09:48:21 2023 -0700

    fix: Allow embedded guest user datasource access with dashboard context (#25081)
    
    (cherry picked from commit 2b8d8da22acc6ffbd49ca256b08aa2fe60e0d718)
---
 superset/security/manager.py                       |  10 +-
 superset/viz.py                                    |   1 +
 .../fixtures/birth_names_dashboard.py              |   8 +
 .../fixtures/world_bank_dashboard.py               |   8 +
 .../security/guest_token_security_tests.py         | 258 ++++++++++++++++++++-
 tests/integration_tests/security_tests.py          |   2 +-
 6 files changed, 277 insertions(+), 10 deletions(-)

diff --git a/superset/security/manager.py b/superset/security/manager.py
index b1cb15aadf..6c47c6e163 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -1886,17 +1886,23 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
                 or self.is_owner(datasource)
                 or (
                     # Grant access to the datasource only if dashboard RBAC is enabled
+                    # or the user is an embedded guest user with access to the dashboard
                     # and said datasource is associated with the dashboard chart in
                     # question.
                     form_data
-                    and is_feature_enabled("DASHBOARD_RBAC")
                     and (dashboard_id := form_data.get("dashboardId"))
                     and (
                         dashboard_ := self.get_session.query(Dashboard)
                         .filter(Dashboard.id == dashboard_id)
                         .one_or_none()
                     )
-                    and dashboard_.roles
+                    and (
+                        (is_feature_enabled("DASHBOARD_RBAC") and dashboard_.roles)
+                        or (
+                            is_feature_enabled("EMBEDDED_SUPERSET")
+                            and self.is_guest_user()
+                        )
+                    )
                     and (
                         (
                             # Native filter.
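
The net effect of the hunk above is easier to see as a standalone predicate.
A sketch only (stand-in names, not the manager's actual code) of how the
dashboard-context grant is broadened:

    def dashboard_context_grants_access(
        feature_flags: dict,
        dashboard_roles: list,
        is_guest_user: bool,
    ) -> bool:
        # Before this change, DASHBOARD_RBAC (plus roles on the dashboard)
        # was the only path; an embedded guest user is now a second,
        # independent path to the same grant.
        return bool(
            (feature_flags.get("DASHBOARD_RBAC") and dashboard_roles)
            or (feature_flags.get("EMBEDDED_SUPERSET") and is_guest_user)
        )

Either path still requires, per the conditions that follow in the real
method, that the chart or native filter named in `form_data` actually
belongs to the dashboard being resolved.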
diff --git a/superset/viz.py b/superset/viz.py
index 3051f104e2..4c240efccc 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -1654,6 +1654,7 @@ class FilterBoxViz(BaseViz):
                 query_obj["orderby"] = [(metric, asc)]
             self.get_query_context_factory().create(
                 datasource={"id": self.datasource.id, "type": self.datasource.type},
+                form_data=self.form_data,
                 queries=[query_obj],
             ).raise_for_access()
             df = self.get_df_payload(query_obj=query_obj).get("df")
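
This one-line change is what lets the broadened grant fire from the legacy
FilterBox path: the security check keys off `form_data`, so it must travel
with the query context. A sketch of the lookup it enables (stand-in values):

    form_data = {"dashboardId": 42}  # illustrative
    if form_data and (dashboard_id := form_data.get("dashboardId")):
        # the manager resolves this dashboard and verifies the datasource
        # is tied to one of its charts or native filters
        print(f"check access via dashboard {dashboard_id}")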
diff --git a/tests/integration_tests/fixtures/birth_names_dashboard.py b/tests/integration_tests/fixtures/birth_names_dashboard.py
index d9a4a5d9e0..513a9f84a2 100644
--- a/tests/integration_tests/fixtures/birth_names_dashboard.py
+++ b/tests/integration_tests/fixtures/birth_names_dashboard.py
@@ -59,6 +59,14 @@ def load_birth_names_dashboard_with_slices_module_scope(load_birth_names_data):
         _cleanup(dash_id_to_delete, slices_ids_to_delete)
 
 
+@pytest.fixture(scope="class")
+def load_birth_names_dashboard_with_slices_class_scope(load_birth_names_data):
+    with app.app_context():
+        dash_id_to_delete, slices_ids_to_delete = _create_dashboards()
+        yield
+        _cleanup(dash_id_to_delete, slices_ids_to_delete)
+
+
 def _create_dashboards():
     table = _create_table(
         table_name=BIRTH_NAMES_TBL_NAME,
diff --git a/tests/integration_tests/fixtures/world_bank_dashboard.py b/tests/integration_tests/fixtures/world_bank_dashboard.py
index 18ceba9af2..a53cd76aa9 100644
--- a/tests/integration_tests/fixtures/world_bank_dashboard.py
+++ b/tests/integration_tests/fixtures/world_bank_dashboard.py
@@ -84,6 +84,14 @@ def load_world_bank_dashboard_with_slices_module_scope(load_world_bank_data):
         _cleanup(dash_id_to_delete, slices_ids_to_delete)
 
 
+@pytest.fixture(scope="class")
+def load_world_bank_dashboard_with_slices_class_scope(load_world_bank_data):
+    with app.app_context():
+        dash_id_to_delete, slices_ids_to_delete = create_dashboard_for_loaded_data()
+        yield
+        _cleanup(dash_id_to_delete, slices_ids_to_delete)
+
+
 def create_dashboard_for_loaded_data():
     with app.app_context():
         table = create_table_metadata(WB_HEALTH_POPULATION, get_example_database())
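
Both fixture modules gain the same class-scoped variant. Generically, a
`scope="class"` pytest fixture runs its setup once per test class and its
post-`yield` teardown once after the class's last test, which is what lets
the dashboards survive across the many tests added below instead of being
rebuilt per test. A generic illustration with hypothetical names:

    import pytest

    @pytest.fixture(scope="class")
    def expensive_resource():
        resource = {"created": True}  # stand-in for _create_dashboards()
        yield resource                # shared by every test in the class
        resource.clear()              # stand-in for _cleanup(...), runs once

    class TestUsesResource:
        def test_one(self, expensive_resource):
            assert expensive_resource["created"]

        def test_two(self, expensive_resource):
            # same instance as in test_one; setup did not rerun
            assert expensive_resource["created"]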
diff --git a/tests/integration_tests/security/guest_token_security_tests.py b/tests/integration_tests/security/guest_token_security_tests.py
index 5f50bf4b50..4cd8a77f76 100644
--- a/tests/integration_tests/security/guest_token_security_tests.py
+++ b/tests/integration_tests/security/guest_token_security_tests.py
@@ -15,25 +15,34 @@
 # specific language governing permissions and limitations
 # under the License.
 """Unit tests for Superset"""
-from unittest import mock
+import json
+from unittest.mock import Mock, patch
 
 import pytest
 from flask import g
 
 from superset import db, security_manager
+from superset.connectors.sqla.models import SqlaTable
 from superset.daos.dashboard import EmbeddedDashboardDAO
 from superset.exceptions import SupersetSecurityException
 from superset.models.dashboard import Dashboard
 from superset.security.guest_token import GuestTokenResourceType
 from superset.sql_parse import Table
+from superset.utils.core import get_example_default_schema
+from superset.utils.database import get_example_database
 from tests.integration_tests.base_tests import SupersetTestCase
 from tests.integration_tests.fixtures.birth_names_dashboard import (
-    load_birth_names_dashboard_with_slices,
+    load_birth_names_dashboard_with_slices_class_scope,
     load_birth_names_data,
 )
+from tests.integration_tests.fixtures.world_bank_dashboard import (
+    load_world_bank_dashboard_with_slices,
+    load_world_bank_dashboard_with_slices_class_scope,
+    load_world_bank_data,
+)
 
 
-@mock.patch.dict(
+@patch.dict(
     "superset.extensions.feature_flag_manager._feature_flags",
     EMBEDDED_SUPERSET=True,
 )
@@ -55,7 +64,7 @@ class TestGuestUserSecurity(SupersetTestCase):
         is_guest = security_manager.is_guest_user(self.authorized_guest())
         self.assertTrue(is_guest)
 
-    @mock.patch.dict(
+    @patch.dict(
         "superset.extensions.feature_flag_manager._feature_flags",
         EMBEDDED_SUPERSET=False,
     )
@@ -91,14 +100,14 @@ class TestGuestUserSecurity(SupersetTestCase):
         self.assertEqual(guest.roles, roles)
 
 
-@mock.patch.dict(
+@patch.dict(
     "superset.extensions.feature_flag_manager._feature_flags",
     EMBEDDED_SUPERSET=True,
 )
-@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
+@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices_class_scope")
 class TestGuestUserDashboardAccess(SupersetTestCase):
     def setUp(self) -> None:
-        self.dash = db.session.query(Dashboard).filter_by(slug="births").first()
+        self.dash = self.get_dash_by_slug("births")
         self.embedded = EmbeddedDashboardDAO.upsert(self.dash, [])
         self.authorized_guest = security_manager.get_guest_user_from_token(
             {
@@ -195,3 +204,238 @@ class TestGuestUserDashboardAccess(SupersetTestCase):
 
         db.session.delete(dash)
         db.session.commit()
+
+
+@patch.dict(
+    "superset.extensions.feature_flag_manager._feature_flags",
+    EMBEDDED_SUPERSET=True,
+)
+@pytest.mark.usefixtures(
+    "create_dataset",
+    "load_birth_names_dashboard_with_slices_class_scope",
+    "load_world_bank_dashboard_with_slices_class_scope",
+)
+class TestGuestUserDatasourceAccess(SupersetTestCase):
+    """
+    Guest users should only have access to datasources that are associated with a
+    dashboard they have access to, and only with that dashboard context present
+    """
+
+    @pytest.fixture(scope="class")
+    def create_dataset(self):
+        with self.create_app().app_context():
+            dataset = SqlaTable(
+                table_name="dummy_sql_table",
+                database=get_example_database(),
+                schema=get_example_default_schema(),
+                sql="select 123 as intcol, 'abc' as strcol",
+            )
+            session = db.session
+            session.add(dataset)
+            session.commit()
+
+            yield dataset
+
+            # rollback
+            session.delete(dataset)
+            session.commit()
+
+    def setUp(self) -> None:
+        self.dash = self.get_dash_by_slug("births")
+        self.other_dash = self.get_dash_by_slug("world_health")
+        self.embedded = EmbeddedDashboardDAO.upsert(self.dash, [])
+        self.authorized_guest = security_manager.get_guest_user_from_token(
+            {
+                "user": {},
+                "resources": [{"type": "dashboard", "id": str(self.embedded.uuid)}],
+            }
+        )
+        self.unauthorized_guest = security_manager.get_guest_user_from_token(
+            {
+                "user": {},
+                "resources": [
+                    {"type": "dashboard", "id": "06383667-3e02-4e5e-843f-44e9c5896b6c"}
+                ],
+            }
+        )
+        self.chart = self.get_slice("Girls", db.session, expunge_from_session=False)
+        self.datasource = self.chart.datasource
+        self.other_chart = self.get_slice(
+            "Treemap", db.session, expunge_from_session=False
+        )
+        self.other_datasource = self.other_chart.datasource
+        self.native_filter_datasource = (
+            db.session.query(SqlaTable).filter_by(table_name="dummy_sql_table").first()
+        )
+        self.dash.json_metadata = json.dumps(
+            {
+                "native_filter_configuration": [
+                    {
+                        "id": "NATIVE_FILTER-ABCDEFGH",
+                        "targets": [{"datasetId": self.native_filter_datasource.id}],
+                    },
+                ]
+            }
+        )
+
+    def test_raise_for_access__happy_path(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            security_manager.raise_for_access(
+                **{
+                    kwarg: Mock(
+                        datasource=self.datasource,
+                        form_data={
+                            "dashboardId": self.dash.id,
+                            "slice_id": self.chart.id,
+                        },
+                    )
+                }
+            )
+
+    def test_raise_for_access__native_filter_happy_path(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            security_manager.raise_for_access(
+                **{
+                    kwarg: Mock(
+                        datasource=self.native_filter_datasource,
+                        form_data={
+                            "dashboardId": self.dash.id,
+                            "native_filter_id": "NATIVE_FILTER-ABCDEFGH",
+                            "type": "NATIVE_FILTER",
+                        },
+                    )
+                }
+            )
+
+    def test_raise_for_access__no_dashboard_in_form_data(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            with self.assertRaises(SupersetSecurityException):
+                security_manager.raise_for_access(
+                    **{
+                        kwarg: Mock(
+                            datasource=self.datasource,
+                            form_data={
+                                "slice_id": self.chart.id,
+                            },
+                        )
+                    }
+                )
+
+    def test_raise_for_access__no_chart_in_form_data(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            with self.assertRaises(SupersetSecurityException):
+                security_manager.raise_for_access(
+                    **{
+                        kwarg: Mock(
+                            datasource=self.datasource,
+                            form_data={
+                                "dashboardId": self.dash.id,
+                            },
+                        )
+                    }
+                )
+
+    def test_raise_for_access__chart_not_on_dashboard(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            with self.assertRaises(SupersetSecurityException):
+                security_manager.raise_for_access(
+                    **{
+                        kwarg: Mock(
+                            datasource=self.other_datasource,
+                            form_data={
+                                "dashboardId": self.dash.id,
+                                "slice_id": self.other_chart.id,
+                            },
+                        )
+                    }
+                )
+
+    def test_raise_for_access__chart_doesnt_belong_to_datasource(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            with self.assertRaises(SupersetSecurityException):
+                security_manager.raise_for_access(
+                    **{
+                        kwarg: Mock(
+                            datasource=self.other_datasource,
+                            form_data={
+                                "dashboardId": self.dash.id,
+                                "slice_id": self.chart.id,
+                            },
+                        )
+                    }
+                )
+
+    def test_raise_for_access__native_filter_no_id_in_form_data(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            with self.assertRaises(SupersetSecurityException):
+                security_manager.raise_for_access(
+                    **{
+                        kwarg: Mock(
+                            datasource=self.native_filter_datasource,
+                            form_data={
+                                "dashboardId": self.dash.id,
+                                "type": "NATIVE_FILTER",
+                            },
+                        )
+                    }
+                )
+
+    def test_raise_for_access__native_filter_datasource_not_associated(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            with self.assertRaises(SupersetSecurityException):
+                security_manager.raise_for_access(
+                    **{
+                        kwarg: Mock(
+                            datasource=self.other_datasource,
+                            form_data={
+                                "dashboardId": self.dash.id,
+                                "native_filter_id": "NATIVE_FILTER-ABCDEFGH",
+                                "type": "NATIVE_FILTER",
+                            },
+                        )
+                    }
+                )
+
+    @patch.dict(
+        "superset.extensions.feature_flag_manager._feature_flags",
+        EMBEDDED_SUPERSET=False,
+    )
+    def test_raise_for_access__embedded_feature_flag_off(self):
+        g.user = self.authorized_guest
+        for kwarg in ["viz", "query_context"]:
+            with self.assertRaises(SupersetSecurityException):
+                security_manager.raise_for_access(
+                    **{
+                        kwarg: Mock(
+                            datasource=self.datasource,
+                            form_data={
+                                "dashboardId": self.dash.id,
+                                "slice_id": self.chart.id,
+                            },
+                        )
+                    }
+                )
+
+    def test_raise_for_access__unauthorized_guest_user(self):
+        g.user = self.unauthorized_guest
+        for kwarg in ["viz", "query_context"]:
+            with self.assertRaises(SupersetSecurityException):
+                security_manager.raise_for_access(
+                    **{
+                        kwarg: Mock(
+                            datasource=self.datasource,
+                            form_data={
+                                "dashboardId": self.dash.id,
+                                "slice_id": self.chart.id,
+                            },
+                        )
+                    }
+                )
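
A note on the token shapes these tests rely on: a guest user's grants come
entirely from the `resources` claim, so the "unauthorized" guest is simply
one whose claim names a dashboard UUID that was never embedded. Illustrative
values only:

    authorized_claims = {
        "user": {},
        "resources": [
            # stand-in for str(self.embedded.uuid) built in setUp above
            {"type": "dashboard", "id": "<embedded-dashboard-uuid>"}
        ],
    }
    unauthorized_claims = {
        "user": {},
        "resources": [
            {"type": "dashboard", "id": "06383667-3e02-4e5e-843f-44e9c5896b6c"}
        ],
    }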
diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py
index 90be0edd17..9eaabf3680 100644
--- a/tests/integration_tests/security_tests.py
+++ b/tests/integration_tests/security_tests.py
@@ -1649,7 +1649,7 @@ class TestSecurityManager(SupersetTestCase):
     def test_raise_for_access_query_context(
         self, mock_can_access_schema, mock_can_access, mock_is_owner, mock_g
     ):
-        query_context = Mock(datasource=self.get_datasource_mock())
+        query_context = Mock(datasource=self.get_datasource_mock(), form_data={})
 
         mock_can_access_schema.return_value = True
         security_manager.raise_for_access(query_context=query_context)


[superset] 08/11: fix(sqllab): rendering performance regression by resultset (#25091)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 429ff9b0f8625724e9dae967f604cad89c25ce97
Author: JUST.in DO IT <ju...@airbnb.com>
AuthorDate: Mon Aug 28 12:27:18 2023 -0700

    fix(sqllab): rendering performance regression by resultset (#25091)
---
 .../{SouthPane.test.jsx => SouthPane.test.tsx}     |  9 ++--
 .../src/SqlLab/components/SouthPane/index.tsx      | 56 ++++++++++++----------
 .../SqlLab/components/SqlEditor/SqlEditor.test.jsx | 24 +++++++---
 3 files changed, 56 insertions(+), 33 deletions(-)

diff --git a/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx b/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.tsx
similarity index 93%
rename from superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx
rename to superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.tsx
index 276d8eea66..80a102ff21 100644
--- a/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx
+++ b/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.tsx
@@ -20,11 +20,12 @@ import React from 'react';
 import configureStore from 'redux-mock-store';
 import thunk from 'redux-thunk';
 import { render, screen, waitFor } from 'spec/helpers/testing-library';
-import SouthPane from 'src/SqlLab/components/SouthPane';
+import SouthPane, { SouthPaneProps } from 'src/SqlLab/components/SouthPane';
 import '@testing-library/jest-dom/extend-expect';
 import { STATUS_OPTIONS } from 'src/SqlLab/constants';
 import { initialState, table, defaultQueryEditor } from 'src/SqlLab/fixtures';
 import { denormalizeTimestamp } from '@superset-ui/core';
+import { Store } from 'redux';
 
 const mockedProps = {
   queryEditorId: defaultQueryEditor.id,
@@ -42,6 +43,8 @@ const mockedEmptyProps = {
   defaultQueryLimit: 100,
 };
 
+jest.mock('src/SqlLab/components/SqlEditorLeftBar', () => jest.fn());
+
 const latestQueryProgressMsg = 'LATEST QUERY MESSAGE - LCly_kkIN';
 
 const middlewares = [thunk];
@@ -100,14 +103,14 @@ const store = mockStore({
     },
   },
 });
-const setup = (props, store) =>
+const setup = (props: SouthPaneProps, store: Store) =>
   render(<SouthPane {...props} />, {
     useRedux: true,
     ...(store && { store }),
   });
 
 describe('SouthPane', () => {
-  const renderAndWait = (props, store) =>
+  const renderAndWait = (props: SouthPaneProps, store: Store) =>
     waitFor(async () => setup(props, store));
 
   it('Renders an empty state for results', async () => {
diff --git a/superset-frontend/src/SqlLab/components/SouthPane/index.tsx b/superset-frontend/src/SqlLab/components/SouthPane/index.tsx
index c2b0cc3beb..621b5762a8 100644
--- a/superset-frontend/src/SqlLab/components/SouthPane/index.tsx
+++ b/superset-frontend/src/SqlLab/components/SouthPane/index.tsx
@@ -16,8 +16,8 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import React, { createRef } from 'react';
-import { useDispatch, useSelector } from 'react-redux';
+import React, { createRef, useMemo } from 'react';
+import { shallowEqual, useDispatch, useSelector } from 'react-redux';
 import shortid from 'shortid';
 import Alert from 'src/components/Alert';
 import Tabs from 'src/components/Tabs';
@@ -105,11 +105,29 @@ const SouthPane = ({
   defaultQueryLimit,
 }: SouthPaneProps) => {
   const dispatch = useDispatch();
-
-  const { editorQueries, dataPreviewQueries, databases, offline, user } =
-    useSelector(({ sqlLab }: SqlLabRootState) => {
-      const { databases, offline, user, queries, tables } = sqlLab;
-      const dataPreviewQueries = tables
+  const user = useSelector(
+    ({ sqlLab }: SqlLabRootState) => sqlLab.user,
+    shallowEqual,
+  );
+  const { databases, offline, queries, tables } = useSelector(
+    ({ sqlLab: { databases, offline, queries, tables } }: SqlLabRootState) => ({
+      databases,
+      offline,
+      queries,
+      tables,
+    }),
+    shallowEqual,
+  );
+  const editorQueries = useMemo(
+    () =>
+      Object.values(queries).filter(
+        ({ sqlEditorId }) => sqlEditorId === queryEditorId,
+      ),
+    [queries, queryEditorId],
+  );
+  const dataPreviewQueries = useMemo(
+    () =>
+      tables
         .filter(
           ({ dataPreviewQueryId, queryEditorId: qeId }) =>
             dataPreviewQueryId &&
@@ -119,18 +137,13 @@ const SouthPane = ({
         .map(({ name, dataPreviewQueryId }) => ({
           ...queries[dataPreviewQueryId],
           tableName: name,
-        }));
-      const editorQueries = Object.values(queries).filter(
-        ({ sqlEditorId }) => sqlEditorId === queryEditorId,
-      );
-      return {
-        editorQueries,
-        dataPreviewQueries,
-        databases,
-        offline: offline ?? false,
-        user,
-      };
-    });
+        })),
+    [queries, queryEditorId, tables],
+  );
+  const latestQuery = useMemo(
+    () => editorQueries.find(({ id }) => id === latestQueryId),
+    [editorQueries, latestQueryId],
+  );
 
   const activeSouthPaneTab =
     useSelector<SqlLabRootState, string>(
@@ -148,11 +161,6 @@ const SouthPane = ({
   );
 
   const renderResults = () => {
-    let latestQuery;
-    if (editorQueries.length > 0) {
-      // get the latest query
-      latestQuery = editorQueries.find(({ id }) => id === latestQueryId);
-    }
     let results;
     if (latestQuery) {
       if (latestQuery?.extra?.errors) {
diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx
index ed3f3c9de2..23424ff264 100644
--- a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx
+++ b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx
@@ -30,6 +30,7 @@ import {
   defaultQueryEditor,
 } from 'src/SqlLab/fixtures';
 import SqlEditorLeftBar from 'src/SqlLab/components/SqlEditorLeftBar';
+import ResultSet from 'src/SqlLab/components/ResultSet';
 import { api } from 'src/hooks/apiResources/queryApi';
 import { getExtensionsRegistry } from '@superset-ui/core';
 import setupExtensions from 'src/setup/setupExtensions';
@@ -46,6 +47,7 @@ jest.mock('src/components/AsyncAceEditor', () => ({
   ),
 }));
 jest.mock('src/SqlLab/components/SqlEditorLeftBar', () => jest.fn());
+jest.mock('src/SqlLab/components/ResultSet', () => jest.fn());
 
 fetchMock.get('glob:*/api/v1/database/*/function_names/', {
   function_names: [],
@@ -56,10 +58,17 @@ fetchMock.post('glob:*/sqllab/execute/*', { result: [] });
 
 let store;
 let actions;
+const latestQuery = {
+  ...queries[0],
+  sqlEditorId: defaultQueryEditor.id,
+};
 const mockInitialState = {
   ...initialState,
   sqlLab: {
     ...initialState.sqlLab,
+    queries: {
+      [latestQuery.id]: { ...latestQuery, startDttm: new Date().getTime() },
+    },
     databases: {
       1991: {
         allow_ctas: false,
@@ -77,6 +86,7 @@ const mockInitialState = {
     unsavedQueryEditor: {
       id: defaultQueryEditor.id,
       dbId: 1991,
+      latestQueryId: latestQuery.id,
     },
   },
 };
@@ -107,7 +117,6 @@ const createStore = initState =>
 describe('SqlEditor', () => {
   const mockedProps = {
     queryEditor: initialState.sqlLab.queryEditors[0],
-    latestQuery: queries[0],
     tables: [table],
     getHeight: () => '100px',
     editorQueries: [],
@@ -125,6 +134,8 @@ describe('SqlEditor', () => {
     SqlEditorLeftBar.mockImplementation(() => (
       <div data-test="mock-sql-editor-left-bar" />
     ));
+    ResultSet.mockClear();
+    ResultSet.mockImplementation(() => <div data-test="mock-result-set" />);
   });
 
   afterEach(() => {
@@ -153,15 +164,18 @@ describe('SqlEditor', () => {
     expect(await findByTestId('react-ace')).toBeInTheDocument();
   });
 
-  it('avoids rerendering EditorLeftBar while typing', async () => {
+  it('avoids rerendering EditorLeftBar and ResultSet while typing', async () => {
     const { findByTestId } = setup(mockedProps, store);
     const editor = await findByTestId('react-ace');
     const sql = 'select *';
     const renderCount = SqlEditorLeftBar.mock.calls.length;
+    const renderCountForSouthPane = ResultSet.mock.calls.length;
     expect(SqlEditorLeftBar).toHaveBeenCalledTimes(renderCount);
+    expect(ResultSet).toHaveBeenCalledTimes(renderCountForSouthPane);
     fireEvent.change(editor, { target: { value: sql } });
     // Verify the rendering regression
     expect(SqlEditorLeftBar).toHaveBeenCalledTimes(renderCount);
+    expect(ResultSet).toHaveBeenCalledTimes(renderCountForSouthPane);
   });
 
   it('renders sql from unsaved change', async () => {
@@ -198,10 +212,8 @@ describe('SqlEditor', () => {
   });
 
   it('render a SouthPane', async () => {
-    const { findByText } = setup(mockedProps, store);
-    expect(
-      await findByText(/run a query to display results/i),
-    ).toBeInTheDocument();
+    const { findByTestId } = setup(mockedProps, store);
+    expect(await findByTestId('mock-result-set')).toBeInTheDocument();
   });
 
   it('runs query action with ctas false', async () => {