You are viewing a plain text version of this content. The canonical (HTML) version of this message is available in the mailing list archive.
Posted to commits@superset.apache.org by mi...@apache.org on 2024/03/07 18:21:33 UTC

(superset) branch 4.0 updated (19efb146b2 -> ba6b86bbc4)

This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a change to branch 4.0
in repository https://gitbox.apache.org/repos/asf/superset.git


    from 19efb146b2 fix(Alerts & Reports): Fixing bug that resets cron value to default when empty   (#27262)
     new 94aeef5f82 fix: improve explore REST api validations (#27395)
     new 4f3a7f3931 fix(API): Updating assets via the API should preserve ownership configuration (#27364)
     new db6df71fad fix(sqllab): Close already removed tab (#27391)
     new 0baa2af200 fix: Re-enable CI checks on release branches (#27390)
     new 2060e545e6 fix: typescript errors in 4.0 (#27402)
     new ba6b86bbc4 fix: missing shared color in mixed timeseries (#27403)

The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails. The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .github/workflows/check_db_migration_confict.yml   |   3 +-
 .github/workflows/codeql-analysis.yml              |  12 +--
 .github/workflows/docker.yml                       |   1 +
 .github/workflows/embedded-sdk-release.yml         |   5 +-
 .github/workflows/generate-FOSSA-report.yml        |   5 +-
 .github/workflows/pre-commit.yml                   |   3 +-
 .github/workflows/prefer-typescript.yml            |   3 +-
 .github/workflows/release.yml                      |   3 +-
 .github/workflows/superset-cli.yml                 |   7 +-
 .github/workflows/superset-docs-deploy.yml         |   3 +-
 .github/workflows/superset-e2e.yml                 |   3 +-
 .github/workflows/superset-frontend.yml            |   1 +
 .github/workflows/superset-helm-release.yml        |   1 +
 .../workflows/superset-python-integrationtest.yml  |  15 +--
 .github/workflows/superset-python-misc.yml         |   3 +-
 .github/workflows/superset-python-presto-hive.yml  |  11 +-
 .github/workflows/superset-python-unittest.yml     |   9 +-
 .github/workflows/superset-translations.yml        |   5 +-
 .github/workflows/superset-websocket.yml           |   3 +-
 .github/workflows/tech-debt.yml                    |   5 +-
 .../superset-ui-core/src/query/api/v1/makeApi.ts   |   4 +-
 .../src/MixedTimeseries/transformProps.ts          |   5 +-
 superset-frontend/src/SqlLab/actions/sqlLab.js     |  19 ++--
 .../AlteredSliceTag/AlteredSliceTagMocks.ts        |   2 +-
 .../src/components/AlteredSliceTag/index.tsx       |   6 +-
 .../src/components/TelemetryPixel/index.tsx        |   1 +
 .../src/hooks/apiResources/queryApi.ts             |   5 +-
 superset/commands/base.py                          |  22 +++-
 superset/commands/chart/update.py                  |   5 +-
 superset/commands/dashboard/update.py              |   9 +-
 superset/commands/dataset/update.py                |   5 +-
 superset/commands/explore/get.py                   |  11 +-
 superset/commands/report/update.py                 |   7 +-
 superset/commands/utils.py                         |  21 +++-
 superset/views/datasource/views.py                 |   4 +-
 superset/views/sql_lab/views.py                    |  20 +++-
 tests/integration_tests/charts/api_tests.py        |  53 ++++++++-
 tests/integration_tests/dashboards/api_tests.py    |  37 +++++++
 tests/integration_tests/datasets/api_tests.py      |  55 ++++++++++
 tests/integration_tests/explore/api_tests.py       |  20 +++-
 tests/integration_tests/reports/api_tests.py       |  87 +++++++++++++++
 tests/integration_tests/sql_lab/api_tests.py       |  59 +++++++++++
 tests/unit_tests/commands/test_utils.py            | 118 +++++++++++++++++++++
 43 files changed, 593 insertions(+), 83 deletions(-)
 create mode 100644 tests/unit_tests/commands/test_utils.py


(superset) 05/06: fix: typescript errors in 4.0 (#27402)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 4.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 2060e545e61b53d44fc5fe6361c5b89993dfa4c9
Author: JUST.in DO IT <ju...@airbnb.com>
AuthorDate: Wed Mar 6 10:16:51 2024 -0800

    fix: typescript errors in 4.0 (#27402)
    
    (cherry picked from commit ce0b70cc8685aa4d83c0c4ad0fb8f03fb7e28ede)
---
 .../packages/superset-ui-core/src/query/api/v1/makeApi.ts           | 4 ++--
 .../src/components/AlteredSliceTag/AlteredSliceTagMocks.ts          | 2 +-
 superset-frontend/src/components/AlteredSliceTag/index.tsx          | 6 +++---
 superset-frontend/src/components/TelemetryPixel/index.tsx           | 1 +
 4 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/superset-frontend/packages/superset-ui-core/src/query/api/v1/makeApi.ts b/superset-frontend/packages/superset-ui-core/src/query/api/v1/makeApi.ts
index 900197fcb6..dc62b9094b 100644
--- a/superset-frontend/packages/superset-ui-core/src/query/api/v1/makeApi.ts
+++ b/superset-frontend/packages/superset-ui-core/src/query/api/v1/makeApi.ts
@@ -115,11 +115,11 @@ export default function makeApi<
         jsonPayload: undefined as JsonObject | undefined,
       };
       if (requestType === 'search') {
-        requestConfig.searchParams = payload as URLSearchParams;
+        requestConfig.searchParams = payload as unknown as URLSearchParams;
       } else if (requestType === 'rison') {
         requestConfig.endpoint = `${endpoint}?q=${rison.encode(payload)}`;
       } else if (requestType === 'form') {
-        requestConfig.postPayload = payload as FormData;
+        requestConfig.postPayload = payload as unknown as FormData;
       } else {
         requestConfig.jsonPayload = payload as JsonObject;
       }
diff --git a/superset-frontend/src/components/AlteredSliceTag/AlteredSliceTagMocks.ts b/superset-frontend/src/components/AlteredSliceTag/AlteredSliceTagMocks.ts
index 233f519446..6428b503b8 100644
--- a/superset-frontend/src/components/AlteredSliceTag/AlteredSliceTagMocks.ts
+++ b/superset-frontend/src/components/AlteredSliceTag/AlteredSliceTagMocks.ts
@@ -17,7 +17,7 @@
  * under the License.
  */
 import { QueryFormData } from '@superset-ui/core';
-import { ControlPanelConfig } from 'packages/superset-ui-chart-controls/src/types';
+import { ControlPanelConfig } from '@superset-ui/chart-controls';
 import { DiffType, RowType } from './index';
 
 export const defaultProps: Record<string, Partial<QueryFormData>> = {
diff --git a/superset-frontend/src/components/AlteredSliceTag/index.tsx b/superset-frontend/src/components/AlteredSliceTag/index.tsx
index dfedc9f5b6..28f47657b9 100644
--- a/superset-frontend/src/components/AlteredSliceTag/index.tsx
+++ b/superset-frontend/src/components/AlteredSliceTag/index.tsx
@@ -179,7 +179,7 @@ class AlteredSliceTag extends React.Component<
         return '[]';
       }
       return value
-        .map(v => {
+        .map((v: FilterItemType) => {
           const filterVal =
             v.comparator && v.comparator.constructor === Array
               ? `[${v.comparator.join(', ')}]`
@@ -198,14 +198,14 @@ class AlteredSliceTag extends React.Component<
       return value.map(v => safeStringify(v)).join(', ');
     }
     if (controlsMap[key]?.type === 'MetricsControl' && Array.isArray(value)) {
-      const formattedValue = value.map(v => v?.label ?? v);
+      const formattedValue = value.map((v: FilterItemType) => v?.label ?? v);
       return formattedValue.length ? formattedValue.join(', ') : '[]';
     }
     if (typeof value === 'boolean') {
       return value ? 'true' : 'false';
     }
     if (Array.isArray(value)) {
-      const formattedValue = value.map(v => v?.label ?? v);
+      const formattedValue = value.map((v: FilterItemType) => v?.label ?? v);
       return formattedValue.length ? formattedValue.join(', ') : '[]';
     }
     if (typeof value === 'string' || typeof value === 'number') {
diff --git a/superset-frontend/src/components/TelemetryPixel/index.tsx b/superset-frontend/src/components/TelemetryPixel/index.tsx
index 6c7ce106e6..f0223ac70d 100644
--- a/superset-frontend/src/components/TelemetryPixel/index.tsx
+++ b/superset-frontend/src/components/TelemetryPixel/index.tsx
@@ -47,6 +47,7 @@ const TelemetryPixel = ({
   const pixelPath = `https://apachesuperset.gateway.scarf.sh/pixel/${PIXEL_ID}/${version}/${sha}/${build}`;
   return process.env.SCARF_ANALYTICS === 'false' ? null : (
     <img
+      // @ts-ignore
       referrerPolicy="no-referrer-when-downgrade"
       src={pixelPath}
       width={0}


(superset) 04/06: fix: Re-enable CI checks on release branches (#27390)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 4.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 0baa2af200bae36aac69305665b19d1ef7f29492
Author: Michael S. Molina <70...@users.noreply.github.com>
AuthorDate: Wed Mar 6 12:23:07 2024 -0500

    fix: Re-enable CI checks on release branches (#27390)
    
    (cherry picked from commit a54a24e3b5b9bd6a0e8aaeb0b58907038b1a5be4)
---
 .github/workflows/check_db_migration_confict.yml      |  3 ++-
 .github/workflows/codeql-analysis.yml                 | 12 ++++++------
 .github/workflows/docker.yml                          |  1 +
 .github/workflows/embedded-sdk-release.yml            |  5 +++--
 .github/workflows/generate-FOSSA-report.yml           |  5 +++--
 .github/workflows/pre-commit.yml                      |  3 ++-
 .github/workflows/prefer-typescript.yml               |  3 ++-
 .github/workflows/release.yml                         |  3 ++-
 .github/workflows/superset-cli.yml                    |  7 ++++---
 .github/workflows/superset-docs-deploy.yml            |  3 ++-
 .github/workflows/superset-e2e.yml                    |  3 ++-
 .github/workflows/superset-frontend.yml               |  1 +
 .github/workflows/superset-helm-release.yml           |  1 +
 .github/workflows/superset-python-integrationtest.yml | 15 ++++++++-------
 .github/workflows/superset-python-misc.yml            |  3 ++-
 .github/workflows/superset-python-presto-hive.yml     | 11 ++++++-----
 .github/workflows/superset-python-unittest.yml        |  9 +++++----
 .github/workflows/superset-translations.yml           |  5 +++--
 .github/workflows/superset-websocket.yml              |  3 ++-
 .github/workflows/tech-debt.yml                       |  5 +++--
 20 files changed, 60 insertions(+), 41 deletions(-)

diff --git a/.github/workflows/check_db_migration_confict.yml b/.github/workflows/check_db_migration_confict.yml
index 637252ab3b..079ba954ed 100644
--- a/.github/workflows/check_db_migration_confict.yml
+++ b/.github/workflows/check_db_migration_confict.yml
@@ -4,7 +4,8 @@ on:
     paths:
       - "superset/migrations/**"
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
   pull_request:
     paths:
       - "superset/migrations/**"
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 15193a195e..c200f9cb36 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -2,16 +2,16 @@ name: "CodeQL"
 
 on:
   push:
-    branches: [ "master" ]
+    branches: ["master", "[0-9].[0-9]"]
     paths:
-      - 'superset/**'
+      - "superset/**"
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: [ "master" ]
+    branches: ["master"]
     paths:
-      - 'superset/**'
+      - "superset/**"
   schedule:
-    - cron: '0 4 * * *'
+    - cron: "0 4 * * *"
 
 # cancel previous workflow jobs for PRs
 concurrency:
@@ -30,7 +30,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: [ 'python', 'javascript' ]
+        language: ["python", "javascript"]
         # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
 
     steps:
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 1085c79f24..d02ad73265 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - "master"
+      - "[0-9].[0-9]"
   pull_request:
     branches:
       - "master"
diff --git a/.github/workflows/embedded-sdk-release.yml b/.github/workflows/embedded-sdk-release.yml
index 39ee461f6b..dcf7346e8f 100644
--- a/.github/workflows/embedded-sdk-release.yml
+++ b/.github/workflows/embedded-sdk-release.yml
@@ -3,7 +3,8 @@ name: Embedded SDK Release
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
 
 jobs:
   config:
@@ -31,7 +32,7 @@ jobs:
       - uses: actions/setup-node@v4
         with:
           node-version: "16"
-          registry-url: 'https://registry.npmjs.org'
+          registry-url: "https://registry.npmjs.org"
       - run: npm ci
       - run: npm run ci:release
         env:
diff --git a/.github/workflows/generate-FOSSA-report.yml b/.github/workflows/generate-FOSSA-report.yml
index 5c7693459b..fb30d7dcc5 100644
--- a/.github/workflows/generate-FOSSA-report.yml
+++ b/.github/workflows/generate-FOSSA-report.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - "master"
+      - "[0-9].[0-9]"
 
 jobs:
   config:
@@ -33,8 +34,8 @@ jobs:
       - name: Setup Java
         uses: actions/setup-java@v4
         with:
-          distribution: 'temurin'
-          java-version: '11'
+          distribution: "temurin"
+          java-version: "11"
       - name: Generate fossa report
         env:
           FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
index 3e2c98970b..f9a3681ce0 100644
--- a/.github/workflows/pre-commit.yml
+++ b/.github/workflows/pre-commit.yml
@@ -3,7 +3,8 @@ name: pre-commit checks
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
   pull_request:
     types: [synchronize, opened, reopened, ready_for_review]
 
diff --git a/.github/workflows/prefer-typescript.yml b/.github/workflows/prefer-typescript.yml
index 51abea6f87..f3179d3bcc 100644
--- a/.github/workflows/prefer-typescript.yml
+++ b/.github/workflows/prefer-typescript.yml
@@ -3,7 +3,8 @@ name: Prefer TypeScript
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
     paths:
       - "superset-frontend/src/**"
   pull_request:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index e504c93b49..bbf0d7ba2d 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -3,7 +3,8 @@ name: release-workflow
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
 
 jobs:
   config:
diff --git a/.github/workflows/superset-cli.yml b/.github/workflows/superset-cli.yml
index bfe3dedc18..9c52f36497 100644
--- a/.github/workflows/superset-cli.yml
+++ b/.github/workflows/superset-cli.yml
@@ -3,7 +3,8 @@ name: Superset CLI tests
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
   pull_request:
     types: [synchronize, opened, reopened, ready_for_review]
 
@@ -55,8 +56,8 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements/testing.txt'
+          cache: "pip"
+          cache-dependency-path: "requirements/testing.txt"
       - name: Install dependencies
         if: steps.check.outcome == 'failure'
         uses: ./.github/actions/cached-dependencies
diff --git a/.github/workflows/superset-docs-deploy.yml b/.github/workflows/superset-docs-deploy.yml
index aa733eba23..d8bcf7e644 100644
--- a/.github/workflows/superset-docs-deploy.yml
+++ b/.github/workflows/superset-docs-deploy.yml
@@ -6,6 +6,7 @@ on:
       - "docs/**"
     branches:
       - "master"
+      - "[0-9].[0-9]"
 
 jobs:
   config:
@@ -38,7 +39,7 @@ jobs:
       - name: Set up Node.js 16
         uses: actions/setup-node@v4
         with:
-          node-version: '16'
+          node-version: "16"
       - name: yarn install
         run: |
           yarn install --check-cache
diff --git a/.github/workflows/superset-e2e.yml b/.github/workflows/superset-e2e.yml
index 3126b9a6a0..71b415ee6e 100644
--- a/.github/workflows/superset-e2e.yml
+++ b/.github/workflows/superset-e2e.yml
@@ -3,7 +3,8 @@ name: E2E
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
   pull_request:
     types: [synchronize, opened, reopened, ready_for_review]
 
diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml
index 3ac99de33c..bce75a06bf 100644
--- a/.github/workflows/superset-frontend.yml
+++ b/.github/workflows/superset-frontend.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - "master"
+      - "[0-9].[0-9]"
     paths:
       - "superset-frontend/**"
   pull_request:
diff --git a/.github/workflows/superset-helm-release.yml b/.github/workflows/superset-helm-release.yml
index b8db3d2186..fd60f82d49 100644
--- a/.github/workflows/superset-helm-release.yml
+++ b/.github/workflows/superset-helm-release.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - "master"
+      - "[0-9].[0-9]"
     paths:
       - "helm/**"
 
diff --git a/.github/workflows/superset-python-integrationtest.yml b/.github/workflows/superset-python-integrationtest.yml
index 385edf89b5..6bf314d961 100644
--- a/.github/workflows/superset-python-integrationtest.yml
+++ b/.github/workflows/superset-python-integrationtest.yml
@@ -4,7 +4,8 @@ name: Python-Integration
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
   pull_request:
     types: [synchronize, opened, reopened, ready_for_review]
 
@@ -54,8 +55,8 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements/testing.txt'
+          cache: "pip"
+          cache-dependency-path: "requirements/testing.txt"
       - name: Install dependencies
         if: steps.check.outcome == 'failure'
         uses: ./.github/actions/cached-dependencies
@@ -120,8 +121,8 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements/testing.txt'
+          cache: "pip"
+          cache-dependency-path: "requirements/testing.txt"
       - name: Install dependencies
         if: steps.check.outcome == 'failure'
         uses: ./.github/actions/cached-dependencies
@@ -180,8 +181,8 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements/testing.txt'
+          cache: "pip"
+          cache-dependency-path: "requirements/testing.txt"
       - name: Install dependencies
         if: steps.check.outcome == 'failure'
         uses: ./.github/actions/cached-dependencies
diff --git a/.github/workflows/superset-python-misc.yml b/.github/workflows/superset-python-misc.yml
index eb5da6f18f..e05e05f48b 100644
--- a/.github/workflows/superset-python-misc.yml
+++ b/.github/workflows/superset-python-misc.yml
@@ -4,7 +4,8 @@ name: Python Misc
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
     paths:
       - "superset/**"
   pull_request:
diff --git a/.github/workflows/superset-python-presto-hive.yml b/.github/workflows/superset-python-presto-hive.yml
index 57d4e05414..af2c844057 100644
--- a/.github/workflows/superset-python-presto-hive.yml
+++ b/.github/workflows/superset-python-presto-hive.yml
@@ -4,7 +4,8 @@ name: Python Presto/Hive
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
     paths:
       - "superset/**"
   pull_request:
@@ -70,8 +71,8 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements/testing.txt'
+          cache: "pip"
+          cache-dependency-path: "requirements/testing.txt"
       - name: Install dependencies
         if: steps.check.outcome == 'failure'
         uses: ./.github/actions/cached-dependencies
@@ -147,8 +148,8 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements/testing.txt'
+          cache: "pip"
+          cache-dependency-path: "requirements/testing.txt"
       - name: Install dependencies
         if: steps.check.outcome == 'failure'
         uses: ./.github/actions/cached-dependencies
diff --git a/.github/workflows/superset-python-unittest.yml b/.github/workflows/superset-python-unittest.yml
index 548f128eac..330fa218ac 100644
--- a/.github/workflows/superset-python-unittest.yml
+++ b/.github/workflows/superset-python-unittest.yml
@@ -4,7 +4,8 @@ name: Python-Unit
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
     paths:
       - "superset/**"
   pull_request:
@@ -43,9 +44,9 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements/testing.txt'
-# TODO: separated requirements.txt file just for unit tests
+          cache: "pip"
+          cache-dependency-path: "requirements/testing.txt"
+      # TODO: separated requirements.txt file just for unit tests
       - name: Install dependencies
         if: steps.check.outcome == 'failure'
         uses: ./.github/actions/cached-dependencies
diff --git a/.github/workflows/superset-translations.yml b/.github/workflows/superset-translations.yml
index 647e27b3d7..4a3d9cc21a 100644
--- a/.github/workflows/superset-translations.yml
+++ b/.github/workflows/superset-translations.yml
@@ -3,7 +3,8 @@ name: Translations
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
   pull_request:
     types: [synchronize, opened, reopened, ready_for_review]
 
@@ -24,7 +25,7 @@ jobs:
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version:  '16'
+          node-version: "16"
       - name: Install dependencies
         uses: ./.github/actions/cached-dependencies
         with:
diff --git a/.github/workflows/superset-websocket.yml b/.github/workflows/superset-websocket.yml
index d0bf6f2f4e..c35573ada1 100644
--- a/.github/workflows/superset-websocket.yml
+++ b/.github/workflows/superset-websocket.yml
@@ -2,7 +2,8 @@ name: WebSocket server
 on:
   push:
     branches:
-      - 'master'
+      - "master"
+      - "[0-9].[0-9]"
     paths:
       - "superset-websocket/**"
   pull_request:
diff --git a/.github/workflows/tech-debt.yml b/.github/workflows/tech-debt.yml
index ccbf0ba181..1c6a9edd46 100644
--- a/.github/workflows/tech-debt.yml
+++ b/.github/workflows/tech-debt.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - master
+      - "[0-9].[0-9]"
 
 jobs:
   config:
@@ -31,7 +32,7 @@ jobs:
       - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: '16'
+          node-version: "16"
 
       - name: Install Dependencies
         run: npm install
@@ -39,7 +40,7 @@ jobs:
 
       - name: Run Script
         env:
-          SPREADSHEET_ID: '1oABNnzxJYzwUrHjr_c9wfYEq9dFL1ScVof9LlaAdxvo'
+          SPREADSHEET_ID: "1oABNnzxJYzwUrHjr_c9wfYEq9dFL1ScVof9LlaAdxvo"
           SERVICE_ACCOUNT_KEY: ${{ secrets.GSHEET_KEY }}
         run: npm run lint-stats
         continue-on-error: true


(superset) 06/06: fix: missing shared color in mixed timeseries (#27403)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 4.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit ba6b86bbc4a5e23789b6e4889a306f064c65fc06
Author: JUST.in DO IT <ju...@airbnb.com>
AuthorDate: Thu Mar 7 10:04:49 2024 -0800

    fix: missing shared color in mixed timeseries (#27403)
    
    (cherry picked from commit 9ced2552dbeeaf60217b385d4c40cbaf4372c787)
---
 .../plugin-chart-echarts/src/MixedTimeseries/transformProps.ts       | 5 +++--
 superset/commands/base.py                                            | 2 +-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts
index bb719c23e1..7488d49dba 100644
--- a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts
+++ b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts
@@ -410,8 +410,9 @@ export default function transformProps(
 
   rawSeriesB.forEach(entry => {
     const entryName = String(entry.name || '');
-    const seriesName = `${inverted[entryName] || entryName} (1)`;
-    const colorScaleKey = getOriginalSeries(seriesName, array);
+    const seriesEntry = inverted[entryName] || entryName;
+    const seriesName = `${seriesEntry} (1)`;
+    const colorScaleKey = getOriginalSeries(seriesEntry, array);
 
     const seriesFormatter = getFormatter(
       customFormattersSecondary,
diff --git a/superset/commands/base.py b/superset/commands/base.py
index 8b330d0669..d2efcda4fe 100644
--- a/superset/commands/base.py
+++ b/superset/commands/base.py
@@ -58,7 +58,7 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
         return populate_owner_list(owner_ids, default_to_user=True)
 
 
-class UpdateMixin:  # pylint: disable=too-few-public-methods
+class UpdateMixin:
     @staticmethod
     def populate_owners(owner_ids: Optional[list[int]] = None) -> list[User]:
         """


(superset) 02/06: fix(API): Updating assets via the API should preserve ownership configuration (#27364)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 4.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 4f3a7f3931b821b9152d88b5d22851e9adf1901d
Author: Vitor Avila <96...@users.noreply.github.com>
AuthorDate: Wed Mar 6 13:40:41 2024 -0300

    fix(API): Updating assets via the API should preserve ownership configuration (#27364)
    
    (cherry picked from commit 66bf70172f2cbd24b17b503588f2edbed0a63247)
---
 superset/commands/base.py                       |  20 +++-
 superset/commands/chart/update.py               |   5 +-
 superset/commands/dashboard/update.py           |   9 +-
 superset/commands/dataset/update.py             |   5 +-
 superset/commands/report/update.py              |   7 +-
 superset/commands/utils.py                      |  21 ++++-
 superset/views/datasource/views.py              |   4 +-
 tests/integration_tests/charts/api_tests.py     |  53 ++++++++++-
 tests/integration_tests/dashboards/api_tests.py |  37 ++++++++
 tests/integration_tests/datasets/api_tests.py   |  55 +++++++++++
 tests/integration_tests/reports/api_tests.py    |  87 +++++++++++++++++
 tests/unit_tests/commands/test_utils.py         | 118 ++++++++++++++++++++++++
 12 files changed, 404 insertions(+), 17 deletions(-)

diff --git a/superset/commands/base.py b/superset/commands/base.py
index caca50755d..8b330d0669 100644
--- a/superset/commands/base.py
+++ b/superset/commands/base.py
@@ -19,7 +19,7 @@ from typing import Any, Optional
 
 from flask_appbuilder.security.sqla.models import User
 
-from superset.commands.utils import populate_owners
+from superset.commands.utils import compute_owner_list, populate_owner_list
 
 
 class BaseCommand(ABC):
@@ -55,7 +55,7 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
         :raises OwnersNotFoundValidationError: if at least one owner can't be resolved
         :returns: Final list of owners
         """
-        return populate_owners(owner_ids, default_to_user=True)
+        return populate_owner_list(owner_ids, default_to_user=True)
 
 
 class UpdateMixin:  # pylint: disable=too-few-public-methods
@@ -69,4 +69,18 @@ class UpdateMixin:  # pylint: disable=too-few-public-methods
         :raises OwnersNotFoundValidationError: if at least one owner can't be resolved
         :returns: Final list of owners
         """
-        return populate_owners(owner_ids, default_to_user=False)
+        return populate_owner_list(owner_ids, default_to_user=False)
+
+    @staticmethod
+    def compute_owners(
+        current_owners: Optional[list[User]],
+        new_owners: Optional[list[int]],
+    ) -> list[User]:
+        """
+        Handle list of owners for update events.
+
+        :param current_owners: list of current owners
+        :param new_owners: list of new owners specified in the update payload
+        :returns: Final list of owners
+        """
+        return compute_owner_list(current_owners, new_owners)
diff --git a/superset/commands/chart/update.py b/superset/commands/chart/update.py
index 40b36ebcc5..178344634e 100644
--- a/superset/commands/chart/update.py
+++ b/superset/commands/chart/update.py
@@ -90,7 +90,10 @@ class UpdateChartCommand(UpdateMixin, BaseCommand):
         if not is_query_context_update(self._properties):
             try:
                 security_manager.raise_for_ownership(self._model)
-                owners = self.populate_owners(owner_ids)
+                owners = self.compute_owners(
+                    self._model.owners,
+                    owner_ids,
+                )
                 self._properties["owners"] = owners
             except SupersetSecurityException as ex:
                 raise ChartForbiddenError() from ex
diff --git a/superset/commands/dashboard/update.py b/superset/commands/dashboard/update.py
index 22dcad4b2c..b2b11e5f6e 100644
--- a/superset/commands/dashboard/update.py
+++ b/superset/commands/dashboard/update.py
@@ -66,7 +66,7 @@ class UpdateDashboardCommand(UpdateMixin, BaseCommand):
 
     def validate(self) -> None:
         exceptions: list[ValidationError] = []
-        owners_ids: Optional[list[int]] = self._properties.get("owners")
+        owner_ids: Optional[list[int]] = self._properties.get("owners")
         roles_ids: Optional[list[int]] = self._properties.get("roles")
         slug: Optional[str] = self._properties.get("slug")
 
@@ -85,10 +85,11 @@ class UpdateDashboardCommand(UpdateMixin, BaseCommand):
             exceptions.append(DashboardSlugExistsValidationError())
 
         # Validate/Populate owner
-        if owners_ids is None:
-            owners_ids = [owner.id for owner in self._model.owners]
         try:
-            owners = self.populate_owners(owners_ids)
+            owners = self.compute_owners(
+                self._model.owners,
+                owner_ids,
+            )
             self._properties["owners"] = owners
         except ValidationError as ex:
             exceptions.append(ex)
diff --git a/superset/commands/dataset/update.py b/superset/commands/dataset/update.py
index 8a72c24fd5..5c0d87b230 100644
--- a/superset/commands/dataset/update.py
+++ b/superset/commands/dataset/update.py
@@ -100,7 +100,10 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
             exceptions.append(DatabaseChangeValidationError())
         # Validate/Populate owner
         try:
-            owners = self.populate_owners(owner_ids)
+            owners = self.compute_owners(
+                self._model.owners,
+                owner_ids,
+            )
             self._properties["owners"] = owners
         except ValidationError as ex:
             exceptions.append(ex)
diff --git a/superset/commands/report/update.py b/superset/commands/report/update.py
index a33ba6b59a..cb63ec5011 100644
--- a/superset/commands/report/update.py
+++ b/superset/commands/report/update.py
@@ -118,10 +118,11 @@ class UpdateReportScheduleCommand(UpdateMixin, BaseReportScheduleCommand):
             raise ReportScheduleForbiddenError() from ex
 
         # Validate/Populate owner
-        if owner_ids is None:
-            owner_ids = [owner.id for owner in self._model.owners]
         try:
-            owners = self.populate_owners(owner_ids)
+            owners = self.compute_owners(
+                self._model.owners,
+                owner_ids,
+            )
             self._properties["owners"] = owners
         except ValidationError as ex:
             exceptions.append(ex)
diff --git a/superset/commands/utils.py b/superset/commands/utils.py
index b7121ec89f..f01c96ba28 100644
--- a/superset/commands/utils.py
+++ b/superset/commands/utils.py
@@ -35,7 +35,7 @@ if TYPE_CHECKING:
     from superset.connectors.sqla.models import BaseDatasource
 
 
-def populate_owners(
+def populate_owner_list(
     owner_ids: list[int] | None,
     default_to_user: bool,
 ) -> list[User]:
@@ -62,6 +62,25 @@ def populate_owners(
     return owners
 
 
+def compute_owner_list(
+    current_owners: list[User] | None,
+    new_owners: list[int] | None,
+) -> list[User]:
+    """
+    Helper function for update commands, to properly handle the owners list.
+    Preserve the previous configuration unless included in the update payload.
+
+    :param current_owners: list of current owners
+    :param new_owners: list of new owners specified in the update payload
+    :returns: Final list of owners
+    """
+    current_owners = current_owners or []
+    owners_ids = (
+        [owner.id for owner in current_owners] if new_owners is None else new_owners
+    )
+    return populate_owner_list(owners_ids, default_to_user=False)
+
+
 def populate_roles(role_ids: list[int] | None = None) -> list[Role]:
     """
     Helper function for commands, will fetch all roles from roles id's
diff --git a/superset/views/datasource/views.py b/superset/views/datasource/views.py
index 2e46faf0af..eba3acf36e 100644
--- a/superset/views/datasource/views.py
+++ b/superset/views/datasource/views.py
@@ -31,7 +31,7 @@ from superset.commands.dataset.exceptions import (
     DatasetForbiddenError,
     DatasetNotFoundError,
 )
-from superset.commands.utils import populate_owners
+from superset.commands.utils import populate_owner_list
 from superset.connectors.sqla.models import SqlaTable
 from superset.connectors.sqla.utils import get_physical_table_metadata
 from superset.daos.datasource import DatasourceDAO
@@ -94,7 +94,7 @@ class Datasource(BaseSupersetView):
             except SupersetSecurityException as ex:
                 raise DatasetForbiddenError() from ex
 
-        datasource_dict["owners"] = populate_owners(
+        datasource_dict["owners"] = populate_owner_list(
             datasource_dict["owners"], default_to_user=False
         )
 
diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py
index d0985124e2..829c7ba518 100644
--- a/tests/integration_tests/charts/api_tests.py
+++ b/tests/integration_tests/charts/api_tests.py
@@ -735,10 +735,59 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
         db.session.delete(model)
         db.session.commit()
 
+    @pytest.mark.usefixtures("add_dashboard_to_chart")
+    def test_update_chart_preserve_ownership(self):
+        """
+        Chart API: Test update chart preserves owner list (if unchanged)
+        """
+        chart_data = {
+            "slice_name": "title1_changed",
+        }
+        admin = self.get_user("admin")
+        self.login(username="admin")
+        uri = f"api/v1/chart/{self.chart.id}"
+        rv = self.put_assert_metric(uri, chart_data, "put")
+        self.assertEqual(rv.status_code, 200)
+        self.assertEqual([admin], self.chart.owners)
+
+    @pytest.mark.usefixtures("add_dashboard_to_chart")
+    def test_update_chart_clear_owner_list(self):
+        """
+        Chart API: Test update chart admin can clear owner list
+        """
+        chart_data = {"slice_name": "title1_changed", "owners": []}
+        admin = self.get_user("admin")
+        self.login(username="admin")
+        uri = f"api/v1/chart/{self.chart.id}"
+        rv = self.put_assert_metric(uri, chart_data, "put")
+        self.assertEqual(rv.status_code, 200)
+        self.assertEqual([], self.chart.owners)
+
+    def test_update_chart_populate_owner(self):
+        """
+        Chart API: Test update admin can update chart with
+        no owners to a different owner
+        """
+        gamma = self.get_user("gamma")
+        admin = self.get_user("admin")
+        chart_id = self.insert_chart("title", [], 1).id
+        model = db.session.query(Slice).get(chart_id)
+        self.assertEqual(model.owners, [])
+        chart_data = {"owners": [gamma.id]}
+        self.login(username="admin")
+        uri = f"api/v1/chart/{chart_id}"
+        rv = self.put_assert_metric(uri, chart_data, "put")
+        self.assertEqual(rv.status_code, 200)
+        model_updated = db.session.query(Slice).get(chart_id)
+        self.assertNotIn(admin, model_updated.owners)
+        self.assertIn(gamma, model_updated.owners)
+        db.session.delete(model_updated)
+        db.session.commit()
+
     @pytest.mark.usefixtures("add_dashboard_to_chart")
     def test_update_chart_new_dashboards(self):
         """
-        Chart API: Test update set new owner to current user
+        Chart API: Test update chart associating it with new dashboard
         """
         chart_data = {
             "slice_name": "title1_changed",
@@ -754,7 +803,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
     @pytest.mark.usefixtures("add_dashboard_to_chart")
     def test_not_update_chart_none_dashboards(self):
         """
-        Chart API: Test update set new owner to current user
+        Chart API: Test update chart without changing dashboards configuration
         """
         chart_data = {"slice_name": "title1_changed_again"}
         self.login(username="admin")
diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py
index 623572c713..fcc1031c2f 100644
--- a/tests/integration_tests/dashboards/api_tests.py
+++ b/tests/integration_tests/dashboards/api_tests.py
@@ -1550,6 +1550,43 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
         db.session.delete(model)
         db.session.commit()
 
+    def test_update_dashboard_clear_owner_list(self):
+        """
+        Dashboard API: Test update admin can clear up owners list
+        """
+        admin = self.get_user("admin")
+        dashboard_id = self.insert_dashboard("title1", "slug1", [admin.id]).id
+        self.login(username="admin")
+        uri = f"api/v1/dashboard/{dashboard_id}"
+        dashboard_data = {"owners": []}
+        rv = self.client.put(uri, json=dashboard_data)
+        self.assertEqual(rv.status_code, 200)
+        model = db.session.query(Dashboard).get(dashboard_id)
+        self.assertEqual([], model.owners)
+        db.session.delete(model)
+        db.session.commit()
+
+    def test_update_dashboard_populate_owner(self):
+        """
+        Dashboard API: Test update admin can update dashboard with
+        no owners to a different owner
+        """
+        self.login(username="admin")
+        gamma = self.get_user("gamma")
+        dashboard = self.insert_dashboard(
+            "title1",
+            "slug1",
+            [],
+        )
+        uri = f"api/v1/dashboard/{dashboard.id}"
+        dashboard_data = {"owners": [gamma.id]}
+        rv = self.client.put(uri, json=dashboard_data)
+        self.assertEqual(rv.status_code, 200)
+        model = db.session.query(Dashboard).get(dashboard.id)
+        self.assertEqual([gamma], model.owners)
+        db.session.delete(model)
+        db.session.commit()
+
     def test_update_dashboard_slug_formatting(self):
         """
         Dashboard API: Test update slug formatting
diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py
index 1ebe5bd1f7..59d02a07d6 100644
--- a/tests/integration_tests/datasets/api_tests.py
+++ b/tests/integration_tests/datasets/api_tests.py
@@ -833,12 +833,66 @@ class TestDatasetApi(SupersetTestCase):
         assert rv.status_code == 422
         assert data == {"message": "Dataset could not be created."}
 
+    def test_update_dataset_preserve_ownership(self):
+        """
+        Dataset API: Test update dataset preserves owner list (if unchanged)
+        """
+
+        dataset = self.insert_default_dataset()
+        current_owners = dataset.owners
+        self.login(username="admin")
+        dataset_data = {"description": "new description"}
+        uri = f"api/v1/dataset/{dataset.id}"
+        rv = self.put_assert_metric(uri, dataset_data, "put")
+        assert rv.status_code == 200
+        model = db.session.query(SqlaTable).get(dataset.id)
+        assert model.owners == current_owners
+
+        db.session.delete(dataset)
+        db.session.commit()
+
+    def test_update_dataset_clear_owner_list(self):
+        """
+        Dataset API: Test update dataset admin can clear ownership config
+        """
+
+        dataset = self.insert_default_dataset()
+        self.login(username="admin")
+        dataset_data = {"owners": []}
+        uri = f"api/v1/dataset/{dataset.id}"
+        rv = self.put_assert_metric(uri, dataset_data, "put")
+        assert rv.status_code == 200
+        model = db.session.query(SqlaTable).get(dataset.id)
+        assert model.owners == []
+
+        db.session.delete(dataset)
+        db.session.commit()
+
+    def test_update_dataset_populate_owner(self):
+        """
+        Dataset API: Test update admin can update dataset with
+        no owners to a different owner
+        """
+        self.login(username="admin")
+        gamma = self.get_user("gamma")
+        dataset = self.insert_dataset("ab_permission", [], get_main_database())
+        dataset_data = {"owners": [gamma.id]}
+        uri = f"api/v1/dataset/{dataset.id}"
+        rv = self.put_assert_metric(uri, dataset_data, "put")
+        assert rv.status_code == 200
+        model = db.session.query(SqlaTable).get(dataset.id)
+        assert model.owners == [gamma]
+
+        db.session.delete(dataset)
+        db.session.commit()
+
     def test_update_dataset_item(self):
         """
         Dataset API: Test update dataset item
         """
 
         dataset = self.insert_default_dataset()
+        current_owners = dataset.owners
         self.login(username="admin")
         dataset_data = {"description": "changed_description"}
         uri = f"api/v1/dataset/{dataset.id}"
@@ -846,6 +900,7 @@ class TestDatasetApi(SupersetTestCase):
         assert rv.status_code == 200
         model = db.session.query(SqlaTable).get(dataset.id)
         assert model.description == dataset_data["description"]
+        assert model.owners == current_owners
 
         db.session.delete(dataset)
         db.session.commit()
diff --git a/tests/integration_tests/reports/api_tests.py b/tests/integration_tests/reports/api_tests.py
index 12d00d59f3..1b74d11f5a 100644
--- a/tests/integration_tests/reports/api_tests.py
+++ b/tests/integration_tests/reports/api_tests.py
@@ -1458,6 +1458,93 @@ class TestReportSchedulesApi(SupersetTestCase):
         rv = self.put_assert_metric(uri, report_schedule_data, "put")
         self.assertEqual(rv.status_code, 403)
 
+    @pytest.mark.usefixtures("create_report_schedules")
+    def test_update_report_preserve_ownership(self):
+        """
+        ReportSchedule API: Test update report preserves owner list (if unchanged)
+        """
+        self.login(username="admin")
+        existing_report = (
+            db.session.query(ReportSchedule)
+            .filter(ReportSchedule.name == "name1")
+            .one_or_none()
+        )
+        current_owners = existing_report.owners
+        report_schedule_data = {
+            "description": "Updated description",
+        }
+        uri = f"api/v1/report/{existing_report.id}"
+        rv = self.put_assert_metric(uri, report_schedule_data, "put")
+        updated_report = (
+            db.session.query(ReportSchedule)
+            .filter(ReportSchedule.name == "name1")
+            .one_or_none()
+        )
+        assert updated_report.owners == current_owners
+
+    @pytest.mark.usefixtures("create_report_schedules")
+    def test_update_report_clear_owner_list(self):
+        """
+        ReportSchedule API: Test update report admin can clear ownership config
+        """
+        self.login(username="admin")
+        existing_report = (
+            db.session.query(ReportSchedule)
+            .filter(ReportSchedule.name == "name1")
+            .one_or_none()
+        )
+        report_schedule_data = {
+            "owners": [],
+        }
+        uri = f"api/v1/report/{existing_report.id}"
+        rv = self.put_assert_metric(uri, report_schedule_data, "put")
+        updated_report = (
+            db.session.query(ReportSchedule)
+            .filter(ReportSchedule.name == "name1")
+            .one_or_none()
+        )
+        assert updated_report.owners == []
+
+    @pytest.mark.usefixtures("create_report_schedules")
+    def test_update_report_populate_owner(self):
+        """
+        ReportSchedule API: Test update admin can update report with
+        no owners to a different owner
+        """
+        gamma = self.get_user("gamma")
+        self.login(username="admin")
+
+        # Modify an existing report to remove all owners
+        existing_report = (
+            db.session.query(ReportSchedule)
+            .filter(ReportSchedule.name == "name1")
+            .one_or_none()
+        )
+        report_update_data = {
+            "owners": [],
+        }
+        uri = f"api/v1/report/{existing_report.id}"
+        rv = self.put_assert_metric(uri, report_update_data, "put")
+        updated_report = (
+            db.session.query(ReportSchedule)
+            .filter(ReportSchedule.name == "name1")
+            .one_or_none()
+        )
+        assert updated_report.owners == []
+
+        # Populate the field
+        report_update_data = {
+            "owners": [gamma.id],
+        }
+        uri = f"api/v1/report/{updated_report.id}"
+        rv = self.put_assert_metric(uri, report_update_data, "put")
+        updated_report = (
+            db.session.query(ReportSchedule)
+            .filter(ReportSchedule.name == "name1")
+            .one_or_none()
+        )
+        assert updated_report.owners == [gamma]
+
     @pytest.mark.usefixtures("create_report_schedules")
     def test_delete_report_schedule(self):
         """
diff --git a/tests/unit_tests/commands/test_utils.py b/tests/unit_tests/commands/test_utils.py
new file mode 100644
index 0000000000..cb99ac37bf
--- /dev/null
+++ b/tests/unit_tests/commands/test_utils.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from superset.commands.utils import compute_owner_list, populate_owner_list, User
+
+
+@patch("superset.commands.utils.g")
+def test_populate_owner_list_default_to_user(mock_user):
+    owner_list = populate_owner_list([], True)
+    assert owner_list == [mock_user.user]
+
+
+@patch("superset.commands.utils.g")
+def test_populate_owner_list_default_to_user_handle_none(mock_user):
+    owner_list = populate_owner_list(None, True)
+    assert owner_list == [mock_user.user]
+
+
+@patch("superset.commands.utils.g")
+@patch("superset.commands.utils.security_manager")
+@patch("superset.commands.utils.get_user_id")
+def test_populate_owner_list_admin_user(mock_user_id, mock_sm, mock_g):
+    test_user = User(id=1, first_name="First", last_name="Last")
+    mock_g.user = User(id=4, first_name="Admin", last_name="User")
+    mock_user_id.return_value = 4
+    mock_sm.is_admin = MagicMock(return_value=True)
+    mock_sm.get_user_by_id = MagicMock(return_value=test_user)
+
+    owner_list = populate_owner_list([1], False)
+    assert owner_list == [test_user]
+
+
+@patch("superset.commands.utils.g")
+@patch("superset.commands.utils.security_manager")
+@patch("superset.commands.utils.get_user_id")
+def test_populate_owner_list_admin_user_empty_list(mock_user_id, mock_sm, mock_g):
+    mock_g.user = User(id=4, first_name="Admin", last_name="User")
+    mock_user_id.return_value = 4
+    mock_sm.is_admin = MagicMock(return_value=True)
+    owner_list = populate_owner_list([], False)
+    assert owner_list == []
+
+
+@patch("superset.commands.utils.g")
+@patch("superset.commands.utils.security_manager")
+@patch("superset.commands.utils.get_user_id")
+def test_populate_owner_list_non_admin(mock_user_id, mock_sm, mock_g):
+    test_user = User(id=1, first_name="First", last_name="Last")
+    mock_g.user = User(id=4, first_name="Non", last_name="admin")
+    mock_user_id.return_value = 4
+    mock_sm.is_admin = MagicMock(return_value=False)
+    mock_sm.get_user_by_id = MagicMock(return_value=test_user)
+
+    owner_list = populate_owner_list([1], False)
+    assert owner_list == [mock_g.user, test_user]
+
+
+@patch("superset.commands.utils.populate_owner_list")
+def test_compute_owner_list_new_owners(mock_populate_owner_list):
+    current_owners = [User(id=1), User(id=2), User(id=3)]
+    new_owners = [4, 5, 6]
+
+    compute_owner_list(current_owners, new_owners)
+    mock_populate_owner_list.assert_called_once_with(new_owners, default_to_user=False)
+
+
+@patch("superset.commands.utils.populate_owner_list")
+def test_compute_owner_list_no_new_owners(mock_populate_owner_list):
+    current_owners = [User(id=1), User(id=2), User(id=3)]
+    new_owners = None
+
+    compute_owner_list(current_owners, new_owners)
+    mock_populate_owner_list.assert_called_once_with([1, 2, 3], default_to_user=False)
+
+
+@patch("superset.commands.utils.populate_owner_list")
+def test_compute_owner_list_new_owner_empty_list(mock_populate_owner_list):
+    current_owners = [User(id=1), User(id=2), User(id=3)]
+    new_owners = []
+
+    compute_owner_list(current_owners, new_owners)
+    mock_populate_owner_list.assert_called_once_with(new_owners, default_to_user=False)
+
+
+@patch("superset.commands.utils.populate_owner_list")
+def test_compute_owner_list_no_owners(mock_populate_owner_list):
+    current_owners = []
+    new_owners = [4, 5, 6]
+
+    compute_owner_list(current_owners, new_owners)
+    mock_populate_owner_list.assert_called_once_with(new_owners, default_to_user=False)
+
+
+@patch("superset.commands.utils.populate_owner_list")
+def test_compute_owner_list_no_owners_handle_none(mock_populate_owner_list):
+    current_owners = None
+    new_owners = [4, 5, 6]
+
+    compute_owner_list(current_owners, new_owners)
+    mock_populate_owner_list.assert_called_once_with(new_owners, default_to_user=False)


(superset) 03/06: fix(sqllab): Close already removed tab (#27391)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 4.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit db6df71fad77437152b541e99cb58f2768090a5a
Author: JUST.in DO IT <ju...@airbnb.com>
AuthorDate: Wed Mar 6 08:59:56 2024 -0800

    fix(sqllab): Close already removed tab (#27391)
    
    (cherry picked from commit 5107cc0fd9134886d7a8eefd51fb242e520a542e)
---
 superset-frontend/src/SqlLab/actions/sqlLab.js     | 19 ++++---
 .../src/hooks/apiResources/queryApi.ts             |  5 +-
 superset/views/sql_lab/views.py                    | 20 ++++++--
 tests/integration_tests/sql_lab/api_tests.py       | 59 ++++++++++++++++++++++
 4 files changed, 90 insertions(+), 13 deletions(-)

diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.js b/superset-frontend/src/SqlLab/actions/sqlLab.js
index 9cfd76e2e2..16bf3f530f 100644
--- a/superset-frontend/src/SqlLab/actions/sqlLab.js
+++ b/superset-frontend/src/SqlLab/actions/sqlLab.js
@@ -743,15 +743,18 @@ export function removeQueryEditor(queryEditor) {
 
     return sync
       .then(() => dispatch({ type: REMOVE_QUERY_EDITOR, queryEditor }))
-      .catch(() =>
-        dispatch(
-          addDangerToast(
-            t(
-              'An error occurred while removing tab. Please contact your administrator.',
+      .catch(({ status }) => {
+        if (status !== 404) {
+          return dispatch(
+            addDangerToast(
+              t(
+                'An error occurred while removing tab. Please contact your administrator.',
+              ),
             ),
-          ),
-        ),
-      );
+          );
+        }
+        return dispatch({ type: REMOVE_QUERY_EDITOR, queryEditor });
+      });
   };
 }
 
diff --git a/superset-frontend/src/hooks/apiResources/queryApi.ts b/superset-frontend/src/hooks/apiResources/queryApi.ts
index b7bf7f5b5d..4099422b54 100644
--- a/superset-frontend/src/hooks/apiResources/queryApi.ts
+++ b/superset-frontend/src/hooks/apiResources/queryApi.ts
@@ -64,7 +64,10 @@ export const supersetClientQuery: BaseQueryFn<
     }))
     .catch(response =>
       getClientErrorObject(response).then(errorObj => ({
-        error: errorObj,
+        error: {
+          error: errorObj?.message || errorObj?.error || response.statusText,
+          status: response.status,
+        },
       })),
     );
 
diff --git a/superset/views/sql_lab/views.py b/superset/views/sql_lab/views.py
index 5c122ea886..8c21eea69a 100644
--- a/superset/views/sql_lab/views.py
+++ b/superset/views/sql_lab/views.py
@@ -112,7 +112,10 @@ class TabStateView(BaseSupersetView):
     @has_access_api
     @expose("/<int:tab_state_id>", methods=("DELETE",))
     def delete(self, tab_state_id: int) -> FlaskResponse:
-        if _get_owner_id(tab_state_id) != get_user_id():
+        owner_id = _get_owner_id(tab_state_id)
+        if owner_id is None:
+            return Response(status=404)
+        if owner_id != get_user_id():
             return Response(status=403)
 
         db.session.query(TabState).filter(TabState.id == tab_state_id).delete(
@@ -127,7 +130,10 @@ class TabStateView(BaseSupersetView):
     @has_access_api
     @expose("/<int:tab_state_id>", methods=("GET",))
     def get(self, tab_state_id: int) -> FlaskResponse:
-        if _get_owner_id(tab_state_id) != get_user_id():
+        owner_id = _get_owner_id(tab_state_id)
+        if owner_id is None:
+            return Response(status=404)
+        if owner_id != get_user_id():
             return Response(status=403)
 
         tab_state = db.session.query(TabState).filter_by(id=tab_state_id).first()
@@ -157,7 +163,10 @@ class TabStateView(BaseSupersetView):
     @has_access_api
     @expose("<int:tab_state_id>", methods=("PUT",))
     def put(self, tab_state_id: int) -> FlaskResponse:
-        if _get_owner_id(tab_state_id) != get_user_id():
+        owner_id = _get_owner_id(tab_state_id)
+        if owner_id is None:
+            return Response(status=404)
+        if owner_id != get_user_id():
             return Response(status=403)
 
         fields = {k: json.loads(v) for k, v in request.form.to_dict().items()}
@@ -172,7 +181,10 @@ class TabStateView(BaseSupersetView):
     @has_access_api
     @expose("<int:tab_state_id>/migrate_query", methods=("POST",))
     def migrate_query(self, tab_state_id: int) -> FlaskResponse:
-        if _get_owner_id(tab_state_id) != get_user_id():
+        owner_id = _get_owner_id(tab_state_id)
+        if owner_id is None:
+            return Response(status=404)
+        if owner_id != get_user_id():
             return Response(status=403)
 
         client_id = json.loads(request.form["queryId"])
diff --git a/tests/integration_tests/sql_lab/api_tests.py b/tests/integration_tests/sql_lab/api_tests.py
index da050c2363..597f961346 100644
--- a/tests/integration_tests/sql_lab/api_tests.py
+++ b/tests/integration_tests/sql_lab/api_tests.py
@@ -137,6 +137,65 @@ class TestSqlLabApi(SupersetTestCase):
         result = resp["result"]
         self.assertEqual(len(result["queries"]), 1)
 
+    @mock.patch.dict(
+        "superset.extensions.feature_flag_manager._feature_flags",
+        {"SQLLAB_BACKEND_PERSISTENCE": True},
+        clear=True,
+    )
+    def test_deleted_tab(self):
+        username = "admin"
+        self.login(username)
+        data = {
+            "queryEditor": json.dumps(
+                {
+                    "title": "Untitled Query 2",
+                    "dbId": 1,
+                    "schema": None,
+                    "autorun": False,
+                    "sql": "SELECT ...",
+                    "queryLimit": 1000,
+                }
+            )
+        }
+        resp = self.get_json_resp("/tabstateview/", data=data)
+        tab_state_id = resp["id"]
+        resp = self.client.delete("/tabstateview/" + str(tab_state_id))
+        assert resp.status_code == 200
+        resp = self.client.get("/tabstateview/" + str(tab_state_id))
+        assert resp.status_code == 404
+        resp = self.client.put(
+            "/tabstateview/" + str(tab_state_id),
+            json=data,
+        )
+        assert resp.status_code == 404
+
+    @mock.patch.dict(
+        "superset.extensions.feature_flag_manager._feature_flags",
+        {"SQLLAB_BACKEND_PERSISTENCE": True},
+        clear=True,
+    )
+    def test_delete_tab_already_removed(self):
+        username = "admin"
+        self.login(username)
+        data = {
+            "queryEditor": json.dumps(
+                {
+                    "title": "Untitled Query 3",
+                    "dbId": 1,
+                    "schema": None,
+                    "autorun": False,
+                    "sql": "SELECT ...",
+                    "queryLimit": 1000,
+                }
+            )
+        }
+        resp = self.get_json_resp("/tabstateview/", data=data)
+        tab_state_id = resp["id"]
+        resp = self.client.delete("/tabstateview/" + str(tab_state_id))
+        assert resp.status_code == 200
+        resp = self.client.delete("/tabstateview/" + str(tab_state_id))
+        assert resp.status_code == 404
+
     def test_get_access_denied(self):
         new_role = Role(name="Dummy Role", permissions=[])
         db.session.add(new_role)


(superset) 01/06: fix: improve explore REST api validations (#27395)

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 4.0
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 94aeef5f827eda424edd5b212651b3e263e96571
Author: Daniel Vaz Gaspar <da...@gmail.com>
AuthorDate: Tue Mar 5 17:44:51 2024 +0000

    fix: improve explore REST api validations (#27395)
    
    (cherry picked from commit a3d2e0bf447fad5d4495eea97529118b562f4e3c)
---
 superset/commands/explore/get.py             | 11 +++++++++--
 tests/integration_tests/explore/api_tests.py | 20 +++++++++++++++++++-
 2 files changed, 28 insertions(+), 3 deletions(-)

diff --git a/superset/commands/explore/get.py b/superset/commands/explore/get.py
index 9d715bd63d..a0ff176109 100644
--- a/superset/commands/explore/get.py
+++ b/superset/commands/explore/get.py
@@ -37,6 +37,7 @@ from superset.daos.exceptions import DatasourceNotFound
 from superset.exceptions import SupersetException
 from superset.explore.exceptions import WrongEndpointError
 from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError
+from superset.extensions import security_manager
 from superset.utils import core as utils
 from superset.views.utils import (
     get_datasource_info,
@@ -61,7 +62,6 @@ class GetExploreCommand(BaseCommand, ABC):
     # pylint: disable=too-many-locals,too-many-branches,too-many-statements
     def run(self) -> Optional[dict[str, Any]]:
         initial_form_data = {}
-
         if self._permalink_key is not None:
             command = GetExplorePermalinkCommand(self._permalink_key)
             permalink_value = command.run()
@@ -110,12 +110,19 @@ class GetExploreCommand(BaseCommand, ABC):
             self._datasource_type = SqlaTable.type
 
         datasource: Optional[BaseDatasource] = None
+
         if self._datasource_id is not None:
             with contextlib.suppress(DatasourceNotFound):
                 datasource = DatasourceDAO.get_datasource(
                     cast(str, self._datasource_type), self._datasource_id
                 )
-        datasource_name = datasource.name if datasource else _("[Missing Dataset]")
+
+        datasource_name = _("[Missing Dataset]")
+
+        if datasource:
+            datasource_name = datasource.name
+            security_manager.can_access_datasource(datasource)
+
         viz_type = form_data.get("viz_type")
         if not viz_type and datasource and datasource.default_endpoint:
             raise WrongEndpointError(redirect=datasource.default_endpoint)
diff --git a/tests/integration_tests/explore/api_tests.py b/tests/integration_tests/explore/api_tests.py
index c0b7f5fcd4..6d33f1c916 100644
--- a/tests/integration_tests/explore/api_tests.py
+++ b/tests/integration_tests/explore/api_tests.py
@@ -197,7 +197,7 @@ def test_get_from_permalink_unknown_key(test_client, login_as_admin):
 
 
 @patch("superset.security.SupersetSecurityManager.can_access_datasource")
-def test_get_dataset_access_denied(
+def test_get_dataset_access_denied_with_form_data_key(
     mock_can_access_datasource, test_client, login_as_admin, dataset
 ):
     message = "Dataset access denied"
@@ -214,6 +214,24 @@ def test_get_dataset_access_denied(
     assert data["message"] == message
 
 
+@patch("superset.security.SupersetSecurityManager.can_access_datasource")
+def test_get_dataset_access_denied(
+    mock_can_access_datasource, test_client, login_as_admin, dataset
+):
+    message = "Dataset access denied"
+    mock_can_access_datasource.side_effect = DatasetAccessDeniedError(
+        message=message, datasource_id=dataset.id, datasource_type=dataset.type
+    )
+    resp = test_client.get(
+        f"api/v1/explore/?datasource_id={dataset.id}&datasource_type={dataset.type}"
+    )
+    data = json.loads(resp.data.decode("utf-8"))
+    assert resp.status_code == 403
+    assert data["datasource_id"] == dataset.id
+    assert data["datasource_type"] == dataset.type
+    assert data["message"] == message
+
+
 @patch("superset.daos.datasource.DatasourceDAO.get_datasource")
 def test_wrong_endpoint(mock_get_datasource, test_client, login_as_admin, dataset):
     dataset.default_endpoint = "another_endpoint"