Posted to commits@superset.apache.org by mi...@apache.org on 2023/10/13 12:15:09 UTC

[superset] branch 3.0 updated (69c2378747 -> cd1b7a4c06)

This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a change to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git


    from 69c2378747 fix(RLS): Fix Info Tooltip + Button Alignment on RLS Modal (#25400)
     new 53b84b9664 fix: thubmnails loading - Talisman default config (#25486)
     new 254cc36b17 fix(Presto): catch DatabaseError when testing Presto views (#25559)
     new 732c5b1f08 fix(Charts): Set max row limit + removed the option to use an empty row limit value (#25579)
     new a0b2dc4266 fix(window): unavailable localStorage and sessionStorage (#25599)
     new c44f1a3299 fix: finestTemporalGrainFormatter (#25618)
     new cd1b7a4c06 fix: revert fix(sqllab): Force trino client async execution (#24859) (#25541)

The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../src/shared-controls/sharedControls.tsx         |   9 +-
 .../superset-ui-core/src/validator/index.ts        |   1 +
 .../src/validator/validateMaxValue.ts              |   8 ++
 ...ateInteger.test.ts => validateMaxValue.test.ts} |  22 ++---
 .../src/SqlLab/reducers/getInitialState.js         | 102 +++++++++++----------
 superset-frontend/src/SqlLab/reducers/sqlLab.js    |  49 ++++++----
 .../explore/components/DatasourcePanel/index.tsx   |  14 ++-
 .../src/explore/components/SaveModal.tsx           |  19 +++-
 .../components/Select/SelectFilterPlugin.tsx       |   4 +-
 superset-frontend/src/hooks/useTabId.ts            |  23 ++++-
 superset/config.py                                 |   4 +-
 superset/db_engine_specs/base.py                   |  18 ----
 superset/db_engine_specs/presto.py                 |   6 +-
 superset/db_engine_specs/trino.py                  |  66 ++-----------
 superset/sql_lab.py                                |   7 +-
 .../db_engine_specs/presto_tests.py                |   4 +-
 tests/unit_tests/db_engine_specs/test_trino.py     |  31 +------
 tests/unit_tests/sql_lab_test.py                   |  10 +-
 18 files changed, 183 insertions(+), 214 deletions(-)
 create mode 100644 superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts
 copy superset-frontend/packages/superset-ui-core/test/validator/{validateInteger.test.ts => validateMaxValue.test.ts} (63%)


[superset] 03/06: fix(Charts): Set max row limit + removed the option to use an empty row limit value (#25579)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 732c5b1f0808a1eef95c0faaa0e8fc0366cae84b
Author: Corbin Bullard <co...@gmail.com>
AuthorDate: Wed Oct 11 13:31:21 2023 -0400

    fix(Charts): Set max row limit + removed the option to use an empty row limit value (#25579)
    
    (cherry picked from commit f556ef53f3177746ec2526b4b963da4ef00c2d58)
---
 .../src/shared-controls/sharedControls.tsx         |  9 +++++++-
 .../superset-ui-core/src/validator/index.ts        |  1 +
 .../src/validator/validateMaxValue.ts              |  8 ++++++++
 .../validator/validateMaxValue.test.ts}            | 24 +++++++++++++++++-----
 4 files changed, 36 insertions(+), 6 deletions(-)

diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx
index abf5153bb0..69fa8a6864 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx
@@ -47,6 +47,8 @@ import {
   isDefined,
   hasGenericChartAxes,
   NO_TIME_RANGE,
+  validateNonEmpty,
+  validateMaxValue,
 } from '@superset-ui/core';
 
 import {
@@ -245,7 +247,12 @@ const row_limit: SharedControlConfig<'SelectControl'> = {
   type: 'SelectControl',
   freeForm: true,
   label: t('Row limit'),
-  validators: [legacyValidateInteger],
+  clearable: false,
+  validators: [
+    validateNonEmpty,
+    legacyValidateInteger,
+    v => validateMaxValue(v, 100000),
+  ],
   default: 10000,
   choices: formatSelectOptions(ROW_LIMIT_OPTIONS),
   description: t('Limits the number of rows that get displayed.'),
diff --git a/superset-frontend/packages/superset-ui-core/src/validator/index.ts b/superset-frontend/packages/superset-ui-core/src/validator/index.ts
index 532efcc959..fb37328c02 100644
--- a/superset-frontend/packages/superset-ui-core/src/validator/index.ts
+++ b/superset-frontend/packages/superset-ui-core/src/validator/index.ts
@@ -22,3 +22,4 @@ export { default as legacyValidateNumber } from './legacyValidateNumber';
 export { default as validateInteger } from './validateInteger';
 export { default as validateNumber } from './validateNumber';
 export { default as validateNonEmpty } from './validateNonEmpty';
+export { default as validateMaxValue } from './validateMaxValue';
diff --git a/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts b/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts
new file mode 100644
index 0000000000..24c1da1c79
--- /dev/null
+++ b/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts
@@ -0,0 +1,8 @@
+import { t } from '../translation';
+
+export default function validateMaxValue(v: unknown, max: Number) {
+  if (Number(v) > +max) {
+    return t('Value cannot exceed %s', max);
+  }
+  return false;
+}
diff --git a/superset-frontend/packages/superset-ui-core/src/validator/index.ts b/superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts
similarity index 52%
copy from superset-frontend/packages/superset-ui-core/src/validator/index.ts
copy to superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts
index 532efcc959..70f3d332c5 100644
--- a/superset-frontend/packages/superset-ui-core/src/validator/index.ts
+++ b/superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts
@@ -17,8 +17,22 @@
  * under the License.
  */
 
-export { default as legacyValidateInteger } from './legacyValidateInteger';
-export { default as legacyValidateNumber } from './legacyValidateNumber';
-export { default as validateInteger } from './validateInteger';
-export { default as validateNumber } from './validateNumber';
-export { default as validateNonEmpty } from './validateNonEmpty';
+import { validateMaxValue } from '@superset-ui/core';
+import './setup';
+
+describe('validateInteger()', () => {
+  it('returns the warning message if invalid', () => {
+    expect(validateMaxValue(10.1, 10)).toBeTruthy();
+    expect(validateMaxValue(1, 0)).toBeTruthy();
+    expect(validateMaxValue('2', 1)).toBeTruthy();
+  });
+  it('returns false if the input is valid', () => {
+    expect(validateMaxValue(0, 1)).toBeFalsy();
+    expect(validateMaxValue(10, 10)).toBeFalsy();
+    expect(validateMaxValue(undefined, 1)).toBeFalsy();
+    expect(validateMaxValue(NaN, NaN)).toBeFalsy();
+    expect(validateMaxValue(null, 1)).toBeFalsy();
+    expect(validateMaxValue('1', 1)).toBeFalsy();
+    expect(validateMaxValue('a', 1)).toBeFalsy();
+  });
+});
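
The new validator returns a translated warning string when the value exceeds the given maximum and false otherwise, and the row_limit control now chains it after validateNonEmpty and legacyValidateInteger with a cap of 100000. Below is a minimal sketch of how such a validator chain could be evaluated; runValidators is a hypothetical helper written for illustration and is not Superset's actual control-validation code.

    import {
      legacyValidateInteger,
      validateMaxValue,
      validateNonEmpty,
    } from '@superset-ui/core';

    // A validator returns a truthy warning message when the value is invalid.
    type Validator = (v: unknown) => unknown;

    // Hypothetical helper: run validators in order and surface the first warning.
    function runValidators(value: unknown, validators: Validator[]): unknown {
      for (const validator of validators) {
        const warning = validator(value);
        if (warning) return warning;
      }
      return false;
    }

    // The same chain the row_limit control uses after this commit.
    const rowLimitValidators: Validator[] = [
      validateNonEmpty,
      legacyValidateInteger,
      v => validateMaxValue(v, 100000),
    ];

    runValidators(250000, rowLimitValidators); // 'Value cannot exceed 100000'
    runValidators(10000, rowLimitValidators); // false (value is accepted)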


[superset] 04/06: fix(window): unavailable localStorage and sessionStorage (#25599)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit a0b2dc4266ea834ab397d463580cc3856d01fe4d
Author: Fabien <18...@users.noreply.github.com>
AuthorDate: Wed Oct 11 19:31:30 2023 +0200

    fix(window): unavailable localStorage and sessionStorage (#25599)
---
 .../src/SqlLab/reducers/getInitialState.js         | 102 +++++++++++----------
 superset-frontend/src/SqlLab/reducers/sqlLab.js    |  49 ++++++----
 .../explore/components/DatasourcePanel/index.tsx   |  14 ++-
 .../src/explore/components/SaveModal.tsx           |  19 +++-
 superset-frontend/src/hooks/useTabId.ts            |  23 ++++-
 5 files changed, 127 insertions(+), 80 deletions(-)

diff --git a/superset-frontend/src/SqlLab/reducers/getInitialState.js b/superset-frontend/src/SqlLab/reducers/getInitialState.js
index dd24054e11..00b0128148 100644
--- a/superset-frontend/src/SqlLab/reducers/getInitialState.js
+++ b/superset-frontend/src/SqlLab/reducers/getInitialState.js
@@ -137,57 +137,61 @@ export default function getInitialState({
 
   const queries = { ...queries_ };
 
-  /**
-   * If the `SQLLAB_BACKEND_PERSISTENCE` feature flag is off, or if the user
-   * hasn't used SQL Lab after it has been turned on, the state will be stored
-   * in the browser's local storage.
-   */
-  if (
-    localStorage.getItem('redux') &&
-    JSON.parse(localStorage.getItem('redux')).sqlLab
-  ) {
-    const { sqlLab } = JSON.parse(localStorage.getItem('redux'));
+  try {
+    /**
+     * If the `SQLLAB_BACKEND_PERSISTENCE` feature flag is off, or if the user
+     * hasn't used SQL Lab after it has been turned on, the state will be stored
+     * in the browser's local storage.
+     */
+    if (
+      localStorage.getItem('redux') &&
+      JSON.parse(localStorage.getItem('redux')).sqlLab
+    ) {
+      const { sqlLab } = JSON.parse(localStorage.getItem('redux'));
 
-    if (sqlLab.queryEditors.length === 0) {
-      // migration was successful
-      localStorage.removeItem('redux');
-    } else {
-      unsavedQueryEditor = sqlLab.unsavedQueryEditor || {};
-      // add query editors and tables to state with a special flag so they can
-      // be migrated if the `SQLLAB_BACKEND_PERSISTENCE` feature flag is on
-      sqlLab.queryEditors.forEach(qe => {
-        queryEditors = {
-          ...queryEditors,
-          [qe.id]: {
-            ...queryEditors[qe.id],
-            ...qe,
-            name: qe.title || qe.name,
-            ...(unsavedQueryEditor.id === qe.id && unsavedQueryEditor),
-            inLocalStorage: true,
-            loaded: true,
-          },
-        };
-      });
-      const expandedTables = new Set();
-      tables = sqlLab.tables.reduce((merged, table) => {
-        const expanded = !expandedTables.has(table.queryEditorId);
-        if (expanded) {
-          expandedTables.add(table.queryEditorId);
-        }
-        return {
-          ...merged,
-          [table.id]: {
-            ...tables[table.id],
-            ...table,
-            expanded,
-          },
-        };
-      }, tables);
-      Object.values(sqlLab.queries).forEach(query => {
-        queries[query.id] = { ...query, inLocalStorage: true };
-      });
-      tabHistory.push(...sqlLab.tabHistory);
+      if (sqlLab.queryEditors.length === 0) {
+        // migration was successful
+        localStorage.removeItem('redux');
+      } else {
+        unsavedQueryEditor = sqlLab.unsavedQueryEditor || {};
+        // add query editors and tables to state with a special flag so they can
+        // be migrated if the `SQLLAB_BACKEND_PERSISTENCE` feature flag is on
+        sqlLab.queryEditors.forEach(qe => {
+          queryEditors = {
+            ...queryEditors,
+            [qe.id]: {
+              ...queryEditors[qe.id],
+              ...qe,
+              name: qe.title || qe.name,
+              ...(unsavedQueryEditor.id === qe.id && unsavedQueryEditor),
+              inLocalStorage: true,
+              loaded: true,
+            },
+          };
+        });
+        const expandedTables = new Set();
+        tables = sqlLab.tables.reduce((merged, table) => {
+          const expanded = !expandedTables.has(table.queryEditorId);
+          if (expanded) {
+            expandedTables.add(table.queryEditorId);
+          }
+          return {
+            ...merged,
+            [table.id]: {
+              ...tables[table.id],
+              ...table,
+              expanded,
+            },
+          };
+        }, tables);
+        Object.values(sqlLab.queries).forEach(query => {
+          queries[query.id] = { ...query, inLocalStorage: true };
+        });
+        tabHistory.push(...sqlLab.tabHistory);
+      }
     }
+  } catch (error) {
+    // continue regardless of error
   }
 
   return {
diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.js b/superset-frontend/src/SqlLab/reducers/sqlLab.js
index 66850ce079..e6e0a54ed9 100644
--- a/superset-frontend/src/SqlLab/reducers/sqlLab.js
+++ b/superset-frontend/src/SqlLab/reducers/sqlLab.js
@@ -424,13 +424,16 @@ export default function sqlLabReducer(state = {}, action) {
       return { ...state, activeSouthPaneTab: action.tabId };
     },
     [actions.MIGRATE_QUERY_EDITOR]() {
-      // remove migrated query editor from localStorage
-      const { sqlLab } = JSON.parse(localStorage.getItem('redux'));
-      sqlLab.queryEditors = sqlLab.queryEditors.filter(
-        qe => qe.id !== action.oldQueryEditor.id,
-      );
-      localStorage.setItem('redux', JSON.stringify({ sqlLab }));
-
+      try {
+        // remove migrated query editor from localStorage
+        const { sqlLab } = JSON.parse(localStorage.getItem('redux'));
+        sqlLab.queryEditors = sqlLab.queryEditors.filter(
+          qe => qe.id !== action.oldQueryEditor.id,
+        );
+        localStorage.setItem('redux', JSON.stringify({ sqlLab }));
+      } catch (error) {
+        // continue regardless of error
+      }
       // replace localStorage query editor with the server backed one
       return addToArr(
         removeFromArr(state, 'queryEditors', action.oldQueryEditor),
@@ -439,12 +442,16 @@ export default function sqlLabReducer(state = {}, action) {
       );
     },
     [actions.MIGRATE_TABLE]() {
-      // remove migrated table from localStorage
-      const { sqlLab } = JSON.parse(localStorage.getItem('redux'));
-      sqlLab.tables = sqlLab.tables.filter(
-        table => table.id !== action.oldTable.id,
-      );
-      localStorage.setItem('redux', JSON.stringify({ sqlLab }));
+      try {
+        // remove migrated table from localStorage
+        const { sqlLab } = JSON.parse(localStorage.getItem('redux'));
+        sqlLab.tables = sqlLab.tables.filter(
+          table => table.id !== action.oldTable.id,
+        );
+        localStorage.setItem('redux', JSON.stringify({ sqlLab }));
+      } catch (error) {
+        // continue regardless of error
+      }
 
       // replace localStorage table with the server backed one
       return addToArr(
@@ -454,12 +461,16 @@ export default function sqlLabReducer(state = {}, action) {
       );
     },
     [actions.MIGRATE_TAB_HISTORY]() {
-      // remove migrated tab from localStorage tabHistory
-      const { sqlLab } = JSON.parse(localStorage.getItem('redux'));
-      sqlLab.tabHistory = sqlLab.tabHistory.filter(
-        tabId => tabId !== action.oldId,
-      );
-      localStorage.setItem('redux', JSON.stringify({ sqlLab }));
+      try {
+        // remove migrated tab from localStorage tabHistory
+        const { sqlLab } = JSON.parse(localStorage.getItem('redux'));
+        sqlLab.tabHistory = sqlLab.tabHistory.filter(
+          tabId => tabId !== action.oldId,
+        );
+        localStorage.setItem('redux', JSON.stringify({ sqlLab }));
+      } catch (error) {
+        // continue regardless of error
+      }
       const tabHistory = state.tabHistory.filter(
         tabId => tabId !== action.oldId,
       );
diff --git a/superset-frontend/src/explore/components/DatasourcePanel/index.tsx b/superset-frontend/src/explore/components/DatasourcePanel/index.tsx
index 1d85c8235f..80ed37a301 100644
--- a/superset-frontend/src/explore/components/DatasourcePanel/index.tsx
+++ b/superset-frontend/src/explore/components/DatasourcePanel/index.tsx
@@ -336,7 +336,11 @@ export default function DataSourcePanel({
   );
 
   const showInfoboxCheck = () => {
-    if (sessionStorage.getItem('showInfobox') === 'false') return false;
+    try {
+      if (sessionStorage.getItem('showInfobox') === 'false') return false;
+    } catch (error) {
+      // continue regardless of error
+    }
     return true;
   };
 
@@ -366,7 +370,13 @@ export default function DataSourcePanel({
             <StyledInfoboxWrapper>
               <Alert
                 closable
-                onClose={() => sessionStorage.setItem('showInfobox', 'false')}
+                onClose={() => {
+                  try {
+                    sessionStorage.setItem('showInfobox', 'false');
+                  } catch (error) {
+                    // continue regardless of error
+                  }
+                }}
                 type="info"
                 message=""
                 description={
diff --git a/superset-frontend/src/explore/components/SaveModal.tsx b/superset-frontend/src/explore/components/SaveModal.tsx
index 86ff27bb42..d390e46388 100644
--- a/superset-frontend/src/explore/components/SaveModal.tsx
+++ b/superset-frontend/src/explore/components/SaveModal.tsx
@@ -119,7 +119,12 @@ class SaveModal extends React.Component<SaveModalProps, SaveModalState> {
   async componentDidMount() {
     let { dashboardId } = this.props;
     if (!dashboardId) {
-      const lastDashboard = sessionStorage.getItem(SK_DASHBOARD_ID);
+      let lastDashboard = null;
+      try {
+        lastDashboard = sessionStorage.getItem(SK_DASHBOARD_ID);
+      } catch (error) {
+        // continue regardless of error
+      }
       dashboardId = lastDashboard && parseInt(lastDashboard, 10);
     }
     if (dashboardId) {
@@ -249,10 +254,14 @@ class SaveModal extends React.Component<SaveModalProps, SaveModalState> {
         );
       }
 
-      if (dashboard) {
-        sessionStorage.setItem(SK_DASHBOARD_ID, `${dashboard.id}`);
-      } else {
-        sessionStorage.removeItem(SK_DASHBOARD_ID);
+      try {
+        if (dashboard) {
+          sessionStorage.setItem(SK_DASHBOARD_ID, `${dashboard.id}`);
+        } else {
+          sessionStorage.removeItem(SK_DASHBOARD_ID);
+        }
+      } catch (error) {
+        // continue regardless of error
       }
 
       // Go to new dashboard url
diff --git a/superset-frontend/src/hooks/useTabId.ts b/superset-frontend/src/hooks/useTabId.ts
index 4f60763c88..56857cca64 100644
--- a/superset-frontend/src/hooks/useTabId.ts
+++ b/superset-frontend/src/hooks/useTabId.ts
@@ -49,16 +49,29 @@ export function useTabId() {
     }
 
     const updateTabId = () => {
-      const lastTabId = window.localStorage.getItem('last_tab_id');
+      let lastTabId;
+      try {
+        lastTabId = window.localStorage.getItem('last_tab_id');
+      } catch (error) {
+        // continue regardless of error
+      }
       const newTabId = String(
         lastTabId ? Number.parseInt(lastTabId, 10) + 1 : 1,
       );
-      window.sessionStorage.setItem('tab_id', newTabId);
-      window.localStorage.setItem('last_tab_id', newTabId);
+      try {
+        window.sessionStorage.setItem('tab_id', newTabId);
+        window.localStorage.setItem('last_tab_id', newTabId);
+      } catch (error) {
+        // continue regardless of error
+      }
       setTabId(newTabId);
     };
-
-    const storedTabId = window.sessionStorage.getItem('tab_id');
+    let storedTabId;
+    try {
+      storedTabId = window.sessionStorage.getItem('tab_id');
+    } catch (error) {
+      // continue regardless of error
+    }
     if (storedTabId) {
       channel.postMessage({
         type: 'REQUESTING_TAB_ID',
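
Every localStorage and sessionStorage access in this patch is wrapped in try/catch because those globals can throw when browser storage is unavailable (for example, blocked cookies/site data or certain embedded contexts). Below is a minimal sketch of the same guard pattern factored into helpers; the helpers are illustrative only and are not part of the Superset codebase, which applies the try/catch inline at each call site.

    // Hypothetical helpers showing the guard pattern the commit applies inline.
    function safeGetItem(storage: Storage, key: string): string | null {
      try {
        return storage.getItem(key);
      } catch (error) {
        // storage may be unavailable; continue regardless of error
        return null;
      }
    }

    function safeSetItem(storage: Storage, key: string, value: string): void {
      try {
        storage.setItem(key, value);
      } catch (error) {
        // continue regardless of error
      }
    }

    // Usage mirroring useTabId.ts after the patch:
    const lastTabId = safeGetItem(window.localStorage, 'last_tab_id');
    const newTabId = String(lastTabId ? Number.parseInt(lastTabId, 10) + 1 : 1);
    safeSetItem(window.sessionStorage, 'tab_id', newTabId);
    safeSetItem(window.localStorage, 'last_tab_id', newTabId);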


[superset] 01/06: fix: thubmnails loading - Talisman default config (#25486)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 53b84b9664e9d4586366ff5bb62f34ca0e0fd62a
Author: Igor Khrol <kh...@gmail.com>
AuthorDate: Wed Oct 11 20:30:09 2023 +0300

    fix: thubmnails loading - Talisman default config (#25486)
    
    (cherry picked from commit 52f631a038dae9d353bae6e0f4cde1f96b1899f1)
---
 superset/config.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/superset/config.py b/superset/config.py
index bda7d0e5f0..4233799d0d 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -1407,7 +1407,7 @@ TALISMAN_ENABLED = utils.cast_to_boolean(os.environ.get("TALISMAN_ENABLED", True
 TALISMAN_CONFIG = {
     "content_security_policy": {
         "default-src": ["'self'"],
-        "img-src": ["'self'", "data:"],
+        "img-src": ["'self'", "blob:", "data:"],
         "worker-src": ["'self'", "blob:"],
         "connect-src": [
             "'self'",
@@ -1429,7 +1429,7 @@ TALISMAN_CONFIG = {
 TALISMAN_DEV_CONFIG = {
     "content_security_policy": {
         "default-src": ["'self'"],
-        "img-src": ["'self'", "data:"],
+        "img-src": ["'self'", "blob:", "data:"],
         "worker-src": ["'self'", "blob:"],
         "connect-src": [
             "'self'",


[superset] 06/06: fix: revert fix(sqllab): Force trino client async execution (#24859) (#25541)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit cd1b7a4c06462fb284b0f2e696b0a108ce1834a8
Author: Ville Brofeldt <33...@users.noreply.github.com>
AuthorDate: Fri Oct 13 04:58:20 2023 -0700

    fix: revert fix(sqllab): Force trino client async execution (#24859) (#25541)
    
    (cherry picked from commit e56e0de45880c20b0eb51d84bc7e5b8898f61c94)
---
 superset/db_engine_specs/base.py               | 18 -------
 superset/db_engine_specs/trino.py              | 66 +++-----------------------
 superset/sql_lab.py                            |  7 ++-
 tests/unit_tests/db_engine_specs/test_trino.py | 31 +-----------
 tests/unit_tests/sql_lab_test.py               | 10 ++--
 5 files changed, 18 insertions(+), 114 deletions(-)

diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 6be3ab24b0..5836e6163f 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -1053,24 +1053,6 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         query object"""
         # TODO: Fix circular import error caused by importing sql_lab.Query
 
-    @classmethod
-    def execute_with_cursor(
-        cls, cursor: Any, sql: str, query: Query, session: Session
-    ) -> None:
-        """
-        Trigger execution of a query and handle the resulting cursor.
-
-        For most implementations this just makes calls to `execute` and
-        `handle_cursor` consecutively, but in some engines (e.g. Trino) we may
-        need to handle client limitations such as lack of async support and
-        perform a more complicated operation to get information from the cursor
-        in a timely manner and facilitate operations such as query stop
-        """
-        logger.debug("Query %d: Running query: %s", query.id, sql)
-        cls.execute(cursor, sql, async_=True)
-        logger.debug("Query %d: Handling cursor", query.id)
-        cls.handle_cursor(cursor, query, session)
-
     @classmethod
     def extract_error_message(cls, ex: Exception) -> str:
         return f"{cls.engine} error: {cls._extract_error_message(ex)}"
diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py
index f758f1fadd..eff78c4fa4 100644
--- a/superset/db_engine_specs/trino.py
+++ b/superset/db_engine_specs/trino.py
@@ -17,8 +17,6 @@
 from __future__ import annotations
 
 import logging
-import threading
-import time
 from typing import Any, TYPE_CHECKING
 
 import simplejson as json
@@ -156,22 +154,15 @@ class TrinoEngineSpec(PrestoBaseEngineSpec):
 
     @classmethod
     def handle_cursor(cls, cursor: Cursor, query: Query, session: Session) -> None:
-        """
-        Handle a trino client cursor.
-
-        WARNING: if you execute a query, it will block until complete and you
-        will not be able to handle the cursor until complete. Use
-        `execute_with_cursor` instead, to handle this asynchronously.
-        """
-
-        # Adds the executed query id to the extra payload so the query can be cancelled
-        cancel_query_id = cursor.query_id
-        logger.debug("Query %d: queryId %s found in cursor", query.id, cancel_query_id)
-        query.set_extra_json_key(key=QUERY_CANCEL_KEY, value=cancel_query_id)
-
         if tracking_url := cls.get_tracking_url(cursor):
             query.tracking_url = tracking_url
 
+        # Adds the executed query id to the extra payload so the query can be cancelled
+        query.set_extra_json_key(
+            key=QUERY_CANCEL_KEY,
+            value=(cancel_query_id := cursor.stats["queryId"]),
+        )
+
         session.commit()
 
         # if query cancelation was requested prior to the handle_cursor call, but
@@ -185,51 +176,6 @@ class TrinoEngineSpec(PrestoBaseEngineSpec):
 
         super().handle_cursor(cursor=cursor, query=query, session=session)
 
-    @classmethod
-    def execute_with_cursor(
-        cls, cursor: Any, sql: str, query: Query, session: Session
-    ) -> None:
-        """
-        Trigger execution of a query and handle the resulting cursor.
-
-        Trino's client blocks until the query is complete, so we need to run it
-        in another thread and invoke `handle_cursor` to poll for the query ID
-        to appear on the cursor in parallel.
-        """
-        execute_result: dict[str, Any] = {}
-
-        def _execute(results: dict[str, Any]) -> None:
-            logger.debug("Query %d: Running query: %s", query.id, sql)
-
-            # Pass result / exception information back to the parent thread
-            try:
-                cls.execute(cursor, sql)
-                results["complete"] = True
-            except Exception as ex:  # pylint: disable=broad-except
-                results["complete"] = True
-                results["error"] = ex
-
-        execute_thread = threading.Thread(target=_execute, args=(execute_result,))
-        execute_thread.start()
-
-        # Wait for a query ID to be available before handling the cursor, as
-        # it's required by that method; it may never become available on error.
-        while not cursor.query_id and not execute_result.get("complete"):
-            time.sleep(0.1)
-
-        logger.debug("Query %d: Handling cursor", query.id)
-        cls.handle_cursor(cursor, query, session)
-
-        # Block until the query completes; same behaviour as the client itself
-        logger.debug("Query %d: Waiting for query to complete", query.id)
-        while not execute_result.get("complete"):
-            time.sleep(0.5)
-
-        # Unfortunately we'll mangle the stack trace due to the thread, but
-        # throwing the original exception allows mapping database errors as normal
-        if err := execute_result.get("error"):
-            raise err
-
     @classmethod
     def prepare_cancel_query(cls, query: Query, session: Session) -> None:
         if QUERY_CANCEL_KEY not in query.extra:
diff --git a/superset/sql_lab.py b/superset/sql_lab.py
index ca157b3240..afc682b10f 100644
--- a/superset/sql_lab.py
+++ b/superset/sql_lab.py
@@ -191,7 +191,7 @@ def get_sql_results(  # pylint: disable=too-many-arguments
                 return handle_query_error(ex, query, session)
 
 
-def execute_sql_statement(  # pylint: disable=too-many-arguments
+def execute_sql_statement(  # pylint: disable=too-many-arguments,too-many-statements
     sql_statement: str,
     query: Query,
     session: Session,
@@ -271,7 +271,10 @@ def execute_sql_statement(  # pylint: disable=too-many-arguments
             )
         session.commit()
         with stats_timing("sqllab.query.time_executing_query", stats_logger):
-            db_engine_spec.execute_with_cursor(cursor, sql, query, session)
+            logger.debug("Query %d: Running query: %s", query.id, sql)
+            db_engine_spec.execute(cursor, sql, async_=True)
+            logger.debug("Query %d: Handling cursor", query.id)
+            db_engine_spec.handle_cursor(cursor, query, session)
 
         with stats_timing("sqllab.query.time_fetching_results", stats_logger):
             logger.debug(
diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py
index 1b50a683a0..963953d18b 100644
--- a/tests/unit_tests/db_engine_specs/test_trino.py
+++ b/tests/unit_tests/db_engine_specs/test_trino.py
@@ -352,7 +352,7 @@ def test_handle_cursor_early_cancel(
     query_id = "myQueryId"
 
     cursor_mock = engine_mock.return_value.__enter__.return_value
-    cursor_mock.query_id = query_id
+    cursor_mock.stats = {"queryId": query_id}
     session_mock = mocker.MagicMock()
 
     query = Query()
@@ -366,32 +366,3 @@ def test_handle_cursor_early_cancel(
         assert cancel_query_mock.call_args[1]["cancel_query_id"] == query_id
     else:
         assert cancel_query_mock.call_args is None
-
-
-def test_execute_with_cursor_in_parallel(mocker: MockerFixture):
-    """Test that `execute_with_cursor` fetches query ID from the cursor"""
-    from superset.db_engine_specs.trino import TrinoEngineSpec
-
-    query_id = "myQueryId"
-
-    mock_cursor = mocker.MagicMock()
-    mock_cursor.query_id = None
-
-    mock_query = mocker.MagicMock()
-    mock_session = mocker.MagicMock()
-
-    def _mock_execute(*args, **kwargs):
-        mock_cursor.query_id = query_id
-
-    mock_cursor.execute.side_effect = _mock_execute
-
-    TrinoEngineSpec.execute_with_cursor(
-        cursor=mock_cursor,
-        sql="SELECT 1 FROM foo",
-        query=mock_query,
-        session=mock_session,
-    )
-
-    mock_query.set_extra_json_key.assert_called_once_with(
-        key=QUERY_CANCEL_KEY, value=query_id
-    )
diff --git a/tests/unit_tests/sql_lab_test.py b/tests/unit_tests/sql_lab_test.py
index edc1fd2ec4..29f45eab68 100644
--- a/tests/unit_tests/sql_lab_test.py
+++ b/tests/unit_tests/sql_lab_test.py
@@ -55,8 +55,8 @@ def test_execute_sql_statement(mocker: MockerFixture, app: None) -> None:
     )
 
     database.apply_limit_to_sql.assert_called_with("SELECT 42 AS answer", 2, force=True)
-    db_engine_spec.execute_with_cursor.assert_called_with(
-        cursor, "SELECT 42 AS answer LIMIT 2", query, session
+    db_engine_spec.execute.assert_called_with(
+        cursor, "SELECT 42 AS answer LIMIT 2", async_=True
     )
     SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec)
 
@@ -106,8 +106,10 @@ def test_execute_sql_statement_with_rls(
         101,
         force=True,
     )
-    db_engine_spec.execute_with_cursor.assert_called_with(
-        cursor, "SELECT * FROM sales WHERE organization_id=42 LIMIT 101", query, session
+    db_engine_spec.execute.assert_called_with(
+        cursor,
+        "SELECT * FROM sales WHERE organization_id=42 LIMIT 101",
+        async_=True,
     )
     SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec)
 


[superset] 05/06: fix: finestTemporalGrainFormatter (#25618)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit c44f1a32993c32fcf8fff1b3bab9fe60c72d086d
Author: Beto Dealmeida <ro...@dealmeida.net>
AuthorDate: Wed Oct 11 15:57:23 2023 -0600

    fix: finestTemporalGrainFormatter (#25618)
    
    (cherry picked from commit 62bffaf935e6745dc4a122c4f4f71ef548511d31)
---
 .../src/filters/components/Select/SelectFilterPlugin.tsx              | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx
index bef70e68f3..7d8ab55fb5 100644
--- a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx
+++ b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx
@@ -117,9 +117,9 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) {
   const labelFormatter = useMemo(
     () =>
       getDataRecordFormatter({
-        timeFormatter: finestTemporalGrainFormatter(data.map(el => el.col)),
+        timeFormatter: finestTemporalGrainFormatter(data.map(el => el[col])),
       }),
-    [data],
+    [data, col],
   );
 
   const updateDataMask = useCallback(
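
Here col is a variable holding the configured filter column's name, so el.col read a property literally named "col" (normally undefined) and the formatter never saw the real temporal values, while el[col] reads the value of that column; adding col to the dependency array also rebuilds the formatter when the column changes. A minimal sketch of the difference, using an illustrative data shape rather than the plugin's real one:

    const col = 'order_date';
    const data: Record<string, string>[] = [
      { order_date: '2023-01-01T00:00:00' },
      { order_date: '2023-02-01T00:00:00' },
    ];

    data.map(el => el.col); // [undefined, undefined] (a property literally named "col")
    data.map(el => el[col]); // ['2023-01-01T00:00:00', '2023-02-01T00:00:00']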


[superset] 02/06: fix(Presto): catch DatabaseError when testing Presto views (#25559)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 3.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 254cc36b17ee5a751ef5cd633b8dc7a045b40616
Author: Rui Zhao <10...@users.noreply.github.com>
AuthorDate: Wed Oct 11 10:31:07 2023 -0700

    fix(Presto): catch DatabaseError when testing Presto views (#25559)
    
    Co-authored-by: Rui Zhao <zh...@dropbox.com>
    (cherry picked from commit be3714e1314df69627614c5229bacaa7839ccfc6)
---
 superset/db_engine_specs/presto.py                      | 6 +++---
 tests/integration_tests/db_engine_specs/presto_tests.py | 4 +++-
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index fc10099d85..561978f61a 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -1269,11 +1269,11 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
             sql = f"SHOW CREATE VIEW {schema}.{table}"
             try:
                 cls.execute(cursor, sql)
+                rows = cls.fetch_data(cursor, 1)
+
+                return rows[0][0]
             except DatabaseError:  # not a VIEW
                 return None
-            rows = cls.fetch_data(cursor, 1)
-
-            return rows[0][0]
 
     @classmethod
     def get_tracking_url(cls, cursor: Cursor) -> str | None:
diff --git a/tests/integration_tests/db_engine_specs/presto_tests.py b/tests/integration_tests/db_engine_specs/presto_tests.py
index 393f89621c..7e151648a6 100644
--- a/tests/integration_tests/db_engine_specs/presto_tests.py
+++ b/tests/integration_tests/db_engine_specs/presto_tests.py
@@ -925,9 +925,11 @@ class TestPrestoDbEngineSpec(TestDbEngineSpec):
     def test_get_create_view_database_error(self):
         from pyhive.exc import DatabaseError
 
-        mock_execute = mock.MagicMock(side_effect=DatabaseError())
+        mock_execute = mock.MagicMock()
+        mock_fetch_data = mock.MagicMock(side_effect=DatabaseError())
         database = mock.MagicMock()
         database.get_raw_connection().__enter__().cursor().execute = mock_execute
+        database.get_raw_connection().__enter__().cursor().fetchall = mock_fetch_data
         schema = "schema"
         table = "table"
         result = PrestoEngineSpec.get_create_view(database, schema=schema, table=table)