Posted to commits@superset.apache.org by el...@apache.org on 2023/10/20 22:54:38 UTC

[superset] branch 2.1 updated (79d0cd5389 -> e9b4cef04d)

This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a change to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git


 discard 79d0cd5389 chore: add latest-official docker tag (#25322)
 discard bf795f9515 fix: Chart series limit doesn't work for some databases (#25150)
 discard 1b534292bc fix: CTE queries with non-SELECT statements (#25014)
 discard b55c950c37 chore: remove CssTemplate and Annotation access from gamma role (#24826)
    omit 61837ed41f fix: validation errors appearing after ssh tunnel switch (#24849)
    omit 2a49cda8f1 fix: SSH Tunnel creation with dynamic form (#24196)
    omit 3d840d92e6 fix: Allow chart import to update the dataset an existing chart points to (#24821)
     new 365e47c2ed fix: Allow chart import to update the dataset an existing chart points to (#24821)
     new 2867f057d2 fix: SSH Tunnel creation with dynamic form (#24196)
     new ad90832fd1 fix: validation errors appearing after ssh tunnel switch (#24849)
     new 95873007bb chore: remove CssTemplate and Annotation access from gamma role (#24826)
     new f16dcd4fed fix: CTE queries with non-SELECT statements (#25014)
     new 17f1d35876 fix: Chart series limit doesn't work for some databases (#25150)
     new e9b4cef04d chore: add latest-official docker tag (#25322)

This update added new revisions after undoing existing revisions.
That is to say, some revisions that were in the old version of the
branch are not in the new version.  This situation occurs
when a user --force pushes a change and generates a repository
containing something like this:

 * -- * -- B -- O -- O -- O   (79d0cd5389)
            \
             N -- N -- N   refs/heads/2.1 (e9b4cef04d)

You should already have received notification emails for all of the O
revisions, and so the following emails describe only the N revisions
from the common base, B.

Any revisions marked "omit" are not gone; other references still
refer to them.  Any revisions marked "discard" are gone forever.

The 7 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
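
To check locally whether one of the revisions marked "omit" is still reachable, the sketch below runs the relevant git commands from a clone of the repository (Python subprocess style, matching the test fixtures later in this thread; the SHA is the first entry of the "omit" list above):

    import subprocess

    # Print every remote branch and tag that still contains the omitted commit.
    for cmd in (
        ["git", "branch", "-r", "--contains", "61837ed41f"],
        ["git", "tag", "--contains", "61837ed41f"],
    ):
        result = subprocess.run(cmd, capture_output=True, text=True)
        print(" ".join(cmd), "->", result.stdout.strip() or "(none)")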


Summary of changes:
 superset/models/helpers.py | 32 --------------------------------
 1 file changed, 32 deletions(-)


[superset] 06/07: fix: Chart series limit doesn't work for some databases (#25150)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 17f1d3587616e6cd67ca33a1e8f0d5c73fc17e89
Author: KSPT-taylorjohn <52...@users.noreply.github.com>
AuthorDate: Thu Aug 31 18:05:39 2023 -0400

    fix: Chart series limit doesn't work for some databases (#25150)
---
 superset/models/helpers.py | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)

diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 4c68e807d4..795cc6faa3 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -100,6 +100,7 @@ config = app.config
 logger = logging.getLogger(__name__)
 
 VIRTUAL_TABLE_ALIAS = "virtual_table"
+SERIES_LIMIT_SUBQ_ALIAS = "series_limit"
 ADVANCED_DATA_TYPES = config["ADVANCED_DATA_TYPES"]
 
 
@@ -1387,7 +1388,13 @@ class ExploreMixin:  # pylint: disable=too-many-public-methods
         }
         columns = columns or []
         groupby = groupby or []
-        series_column_names = utils.get_column_names(series_columns or [])
+        db_engine_spec = self.db_engine_spec
+        series_column_labels = [
+            db_engine_spec.make_label_compatible(column)
+            for column in utils.get_column_names(
+                columns=series_columns or [],
+            )
+        ]
         # deprecated, to be removed in 2.0
         if is_timeseries and timeseries_limit:
             series_limit = timeseries_limit
@@ -1400,8 +1407,7 @@ class ExploreMixin:  # pylint: disable=too-many-public-methods
         template_kwargs["removed_filters"] = removed_filters
         template_kwargs["applied_filters"] = applied_template_filters
         template_processor = self.get_template_processor(**template_kwargs)
-        db_engine_spec = self.db_engine_spec
-        prequeries: List[str] = []
+        prequeries: list[str] = []
         orderby = orderby or []
         need_groupby = bool(metrics is not None or groupby)
         metrics = metrics or []
@@ -1541,8 +1547,8 @@ class ExploreMixin:  # pylint: disable=too-many-public-methods
                     )
                 groupby_all_columns[outer.name] = outer
                 if (
-                    is_timeseries and not series_column_names
-                ) or outer.name in series_column_names:
+                    is_timeseries and not series_column_labels
+                ) or outer.name in series_column_labels:
                     groupby_series_columns[outer.name] = outer
                 select_exprs.append(outer)
         elif columns:
@@ -1922,7 +1928,7 @@ class ExploreMixin:  # pylint: disable=too-many-public-methods
                     col_name = db_engine_spec.make_label_compatible(gby_name + "__")
                     on_clause.append(gby_obj == sa.column(col_name))
 
-                tbl = tbl.join(subq.alias(), and_(*on_clause))
+                tbl = tbl.join(subq.alias(SERIES_LIMIT_SUBQ_ALIAS), and_(*on_clause))
 
                 # run prequery to get top groups
                 prequery_obj = {
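
For context: before this change the join used subq.alias(), letting SQLAlchemy pick an anonymous alias name; the fix pins the series-limit subquery to the explicit "series_limit" alias. A minimal standalone sketch of the named form (table and column names here are illustrative, not Superset code):

    import sqlalchemy as sa

    t = sa.table("metrics", sa.column("name"), sa.column("cnt"))

    # Explicitly named subquery, as the fix does with SERIES_LIMIT_SUBQ_ALIAS.
    named = sa.select(t.c.name).limit(5).subquery("series_limit")
    q = sa.select(t.c.name, t.c.cnt).select_from(
        t.join(named, t.c.name == named.c.name)
    )
    print(q)  # ... JOIN (SELECT ... LIMIT :param_1) AS series_limit ON ...

With the anonymous form the alias renders as a generated name such as anon_1; pinning it makes the emitted SQL stable across engines.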


[superset] 05/07: fix: CTE queries with non-SELECT statements (#25014)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit f16dcd4fed8b401fd99cf903189d1f74758421e4
Author: Daniel Vaz Gaspar <da...@gmail.com>
AuthorDate: Sat Aug 19 15:49:15 2023 +0100

    fix: CTE queries with non-SELECT statements (#25014)
---
 superset/sql_parse.py               |  55 +++++++++++++++++++
 tests/unit_tests/sql_parse_tests.py | 103 ++++++++++++++++++++++++++++++++++++
 2 files changed, 158 insertions(+)

diff --git a/superset/sql_parse.py b/superset/sql_parse.py
index a3c1af87b0..216b4e8825 100644
--- a/superset/sql_parse.py
+++ b/superset/sql_parse.py
@@ -216,9 +216,53 @@ class ParsedQuery:
     def limit(self) -> Optional[int]:
         return self._limit
 
+    def _get_cte_tables(self, parsed: dict[str, Any]) -> list[dict[str, Any]]:
+        if "with" not in parsed:
+            return []
+        return parsed["with"].get("cte_tables", [])
+
+    def _check_cte_is_select(self, oxide_parse: list[dict[str, Any]]) -> bool:
+        """
+        Check if a oxide parsed CTE contains only SELECT statements
+
+        :param oxide_parse: parsed CTE
+        :return: True if CTE is a SELECT statement
+        """
+        for query in oxide_parse:
+            parsed_query = query["Query"]
+            cte_tables = self._get_cte_tables(parsed_query)
+            for cte_table in cte_tables:
+                is_select = all(
+                    key == "Select" for key in cte_table["query"]["body"].keys()
+                )
+                if not is_select:
+                    return False
+        return True
+
     def is_select(self) -> bool:
         # make sure we strip comments; prevents a bug with comments in the CTE
         parsed = sqlparse.parse(self.strip_comments())
+
+        # Check if this is a CTE
+        if parsed[0].is_group and parsed[0][0].ttype == Keyword.CTE:
+            if sqloxide_parse is not None:
+                try:
+                    if not self._check_cte_is_select(
+                        sqloxide_parse(self.strip_comments(), dialect="ansi")
+                    ):
+                        return False
+                except ValueError:
+                    # sqloxide was not able to parse the query, so let's continue with
+                    # sqlparse
+                    pass
+            inner_cte = self.get_inner_cte_expression(parsed[0].tokens) or []
+            # Check if the inner CTE is not a SELECT
+            if any(token.ttype == DDL for token in inner_cte) or any(
+                token.ttype == DML and token.normalized != "SELECT"
+                for token in inner_cte
+            ):
+                return False
+
         if parsed[0].get_type() == "SELECT":
             return True
 
@@ -240,6 +284,17 @@ class ParsedQuery:
             token.ttype == DML and token.value == "SELECT" for token in parsed[0]
         )
 
+    def get_inner_cte_expression(self, tokens: TokenList) -> Optional[TokenList]:
+        for token in tokens:
+            if self._is_identifier(token):
+                for identifier_token in token.tokens:
+                    if (
+                        isinstance(identifier_token, Parenthesis)
+                        and identifier_token.is_group
+                    ):
+                        return identifier_token.tokens
+        return None
+
     def is_valid_ctas(self) -> bool:
         parsed = sqlparse.parse(self.strip_comments())
         return parsed[-1].get_type() == "SELECT"
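
Taken together, the new checks mean a WITH query is reported as a SELECT only when every CTE body is itself a SELECT, with sqloxide as the primary parser and sqlparse as the fallback. A short usage sketch, mirroring the tests added below (assumes a Superset checkout on the import path):

    from superset.sql_parse import ParsedQuery

    assert ParsedQuery(
        "WITH a AS (select * from foo1) SELECT * FROM a"
    ).is_select()

    assert not ParsedQuery(
        "WITH a AS (INSERT INTO foo (id) VALUES (1) RETURNING 1) select * FROM a"
    ).is_select()
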
diff --git a/tests/unit_tests/sql_parse_tests.py b/tests/unit_tests/sql_parse_tests.py
index d6939fa080..09eeabce2f 100644
--- a/tests/unit_tests/sql_parse_tests.py
+++ b/tests/unit_tests/sql_parse_tests.py
@@ -1008,6 +1008,109 @@ FROM foo f"""
     assert sql.is_select()
 
 
+def test_cte_is_select_lowercase() -> None:
+    """
+    Some CTEs with lowercase select are not correctly identified as SELECTS.
+    """
+    sql = ParsedQuery(
+        """WITH foo AS(
+select
+  FLOOR(__time TO WEEK) AS "week",
+  name,
+  COUNT(DISTINCT user_id) AS "unique_users"
+FROM "druid"."my_table"
+GROUP BY 1,2
+)
+select
+  f.week,
+  f.name,
+  f.unique_users
+FROM foo f"""
+    )
+    assert sql.is_select()
+
+
+def test_cte_insert_is_not_select() -> None:
+    """
+    CTEs containing an INSERT statement must not be identified as SELECTs.
+    """
+    sql = ParsedQuery(
+        """WITH foo AS(
+        INSERT INTO foo (id) VALUES (1) RETURNING 1
+        ) select * FROM foo f"""
+    )
+    assert sql.is_select() is False
+
+
+def test_cte_delete_is_not_select() -> None:
+    """
+    CTEs containing a DELETE statement must not be identified as SELECTs.
+    """
+    sql = ParsedQuery(
+        """WITH foo AS(
+        DELETE FROM foo RETURNING *
+        ) select * FROM foo f"""
+    )
+    assert sql.is_select() is False
+
+
+def test_cte_is_not_select_lowercase() -> None:
+    """
+    CTEs containing a lowercase INSERT must not be identified as SELECTs.
+    """
+    sql = ParsedQuery(
+        """WITH foo AS(
+        insert into foo (id) values (1) RETURNING 1
+        ) select * FROM foo f"""
+    )
+    assert sql.is_select() is False
+
+
+def test_cte_with_multiple_selects() -> None:
+    sql = ParsedQuery(
+        "WITH a AS ( select * from foo1 ), b as (select * from foo2) SELECT * FROM a;"
+    )
+    assert sql.is_select()
+
+
+def test_cte_with_multiple_with_non_select() -> None:
+    sql = ParsedQuery(
+        """WITH a AS (
+        select * from foo1
+        ), b as (
+        update foo2 set id=2
+        ) SELECT * FROM a"""
+    )
+    assert sql.is_select() is False
+    sql = ParsedQuery(
+        """WITH a AS (
+         update foo2 set name=2
+         ),
+        b as (
+        select * from foo1
+        ) SELECT * FROM a"""
+    )
+    assert sql.is_select() is False
+    sql = ParsedQuery(
+        """WITH a AS (
+         update foo2 set name=2
+         ),
+        b as (
+        update foo1 set name=2
+        ) SELECT * FROM a"""
+    )
+    assert sql.is_select() is False
+    sql = ParsedQuery(
+        """WITH a AS (
+        INSERT INTO foo (id) VALUES (1)
+        ),
+        b as (
+        select 1
+        ) SELECT * FROM a"""
+    )
+    assert sql.is_select() is False
+
+
 def test_unknown_select() -> None:
     """
     Test that `is_select` works when sqlparse fails to identify the type.


[superset] 01/07: fix: Allow chart import to update the dataset an existing chart points to (#24821)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 365e47c2edc69e14b1ea69fdd6640b89472e9391
Author: Jack Fragassi <jf...@gmail.com>
AuthorDate: Fri Jul 28 16:08:03 2023 -0700

    fix: Allow chart import to update the dataset an existing chart points to (#24821)
---
 superset/charts/commands/importers/v1/utils.py |  5 ++++-
 superset/models/helpers.py                     | 16 ++++++++++------
 2 files changed, 14 insertions(+), 7 deletions(-)

diff --git a/superset/charts/commands/importers/v1/utils.py b/superset/charts/commands/importers/v1/utils.py
index d4aeb17a1e..bbbe67db92 100644
--- a/superset/charts/commands/importers/v1/utils.py
+++ b/superset/charts/commands/importers/v1/utils.py
@@ -46,7 +46,10 @@ def import_chart(
     # TODO (betodealmeida): move this logic to import_from_dict
     config["params"] = json.dumps(config["params"])
 
-    chart = Slice.import_from_dict(session, config, recursive=False)
+    chart = Slice.import_from_dict(
+        session, config, recursive=False, allow_reparenting=True
+    )
+
     if chart.id is None:
         session.flush()
 
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index bce9970884..4c68e807d4 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -184,7 +184,7 @@ class ImportExportMixin:
     __mapper__: Mapper
 
     @classmethod
-    def _unique_constrains(cls) -> List[Set[str]]:
+    def _unique_constraints(cls) -> list[set[str]]:
         """Get all (single column and multi column) unique constraints"""
         unique = [
             {c.name for c in u.columns}
@@ -245,7 +245,8 @@ class ImportExportMixin:
         dict_rep: Dict[Any, Any],
         parent: Optional[Any] = None,
         recursive: bool = True,
-        sync: Optional[List[str]] = None,
+        sync: Optional[list[str]] = None,
+        allow_reparenting: bool = False,
     ) -> Any:
         """Import obj from a dictionary"""
         if sync is None:
@@ -258,7 +259,7 @@ class ImportExportMixin:
             | {"uuid"}
         )
         new_children = {c: dict_rep[c] for c in cls.export_children if c in dict_rep}
-        unique_constrains = cls._unique_constrains()
+        unique_constraints = cls._unique_constraints()
 
         filters = []  # Using these filters to check if obj already exists
 
@@ -279,8 +280,11 @@ class ImportExportMixin:
             for k, v in parent_refs.items():
                 dict_rep[k] = getattr(parent, v)
 
-        # Add filter for parent obj
-        filters.extend([getattr(cls, k) == dict_rep.get(k) for k in parent_refs.keys()])
+        if not allow_reparenting:
+            # Add filter for parent obj
+            filters.extend(
+                [getattr(cls, k) == dict_rep.get(k) for k in parent_refs.keys()]
+            )
 
         # Add filter for unique constraints
         ucs = [
@@ -291,7 +295,7 @@ class ImportExportMixin:
                     if dict_rep.get(k) is not None
                 ]
             )
-            for cs in unique_constrains
+            for cs in unique_constraints
         ]
         filters.append(or_(*ucs))
 


[superset] 07/07: chore: add latest-official docker tag (#25322)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit e9b4cef04def440b3bf82714cee25b5ff6c6e35b
Author: Elizabeth Thompson <es...@gmail.com>
AuthorDate: Wed Oct 18 16:59:30 2023 -0700

    chore: add latest-official docker tag (#25322)
---
 .github/workflows/docker-release.yml               |   3 +-
 .github/workflows/docker.yml                       |   2 +-
 .../workflows => scripts}/docker_build_push.sh     |  82 +++++++++++-
 scripts/tag_latest_release.sh                      | 140 +++++++++++++++------
 tests/unit_tests/fixtures/bash_mock.py             |  44 +++++++
 tests/unit_tests/scripts/docker_build_push_test.py |  44 +++++++
 .../unit_tests/scripts/tag_latest_release_test.py  |  49 ++++++++
 7 files changed, 322 insertions(+), 42 deletions(-)

diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml
index d082603be9..9e9119ab78 100644
--- a/.github/workflows/docker-release.yml
+++ b/.github/workflows/docker-release.yml
@@ -19,4 +19,5 @@ jobs:
           DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
           DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
         run: |
-          .github/workflows/docker_build_push.sh
+          GITHUB_RELEASE_TAG_NAME="${{ github.event.release.tag_name }}"
+          ./scripts/docker_build_push.sh "$GITHUB_RELEASE_TAG_NAME"
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index bd3fa3730c..8e62938baf 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -41,7 +41,7 @@ jobs:
           DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
           DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
         run: |
-          .github/workflows/docker_build_push.sh
+          ./scripts/docker_build_push.sh
 
       - name: Build ephemeral env image
         if: github.event_name == 'pull_request'
diff --git a/.github/workflows/docker_build_push.sh b/scripts/docker_build_push.sh
similarity index 57%
rename from .github/workflows/docker_build_push.sh
rename to scripts/docker_build_push.sh
index b969813627..4aef8f2244 100755
--- a/.github/workflows/docker_build_push.sh
+++ b/scripts/docker_build_push.sh
@@ -17,6 +17,8 @@
 #
 set -eo pipefail
 
+GITHUB_RELEASE_TAG_NAME="$1"
+
 SHA=$(git rev-parse HEAD)
 REPO_NAME="apache/superset"
 
@@ -32,10 +34,27 @@ else
   LATEST_TAG="${REFSPEC}"
 fi
 
+
 if [[ "${REFSPEC}" == "master" ]]; then
-  LATEST_TAG="latest"
+  LATEST_TAG="master"
+fi
+
+# get the latest release tag
+if [ -n "${GITHUB_RELEASE_TAG_NAME}" ]; then
+  output=$(source ./scripts/tag_latest_release.sh "${GITHUB_RELEASE_TAG_NAME}" --dry-run) || true
+  SKIP_TAG=$(echo "${output}" | grep "SKIP_TAG" | cut -d'=' -f2)
+  if [[ "${SKIP_TAG}" == "SKIP_TAG::false" ]]; then
+    LATEST_TAG="latest"
+  fi
+fi
+
+if [[ "${TEST_ENV}" == "true" ]]; then
+  # don't run the build in test environment
+  echo "LATEST_TAG is ${LATEST_TAG}"
+  exit 0
 fi
 
+
 cat<<EOF
   Rolling with tags:
   - ${REPO_NAME}:${SHA}
@@ -43,6 +62,47 @@ cat<<EOF
   - ${REPO_NAME}:${LATEST_TAG}
 EOF
 
+if [ -z "${DOCKERHUB_TOKEN}" ]; then
+  # Skip if secrets aren't populated -- they're only visible for actions running in the repo (not on forks)
+  echo "Skipping Docker push"
+  # By default load it back
+  DOCKER_ARGS="--load"
+  ARCHITECTURE_FOR_BUILD="linux/amd64 linux/arm64"
+else
+  # Login and push
+  docker logout
+  docker login --username "${DOCKERHUB_USER}" --password "${DOCKERHUB_TOKEN}"
+  DOCKER_ARGS="--push"
+  ARCHITECTURE_FOR_BUILD="linux/amd64,linux/arm64"
+fi
+set -x
+
+# for the dev image, it's ok to tag master as latest-dev
+# for production, we only want to tag the latest official release as latest
+if [ "${LATEST_TAG}" = "master" ]; then
+  DEV_TAG="${REPO_NAME}:latest-dev"
+else
+  DEV_TAG="${REPO_NAME}:${LATEST_TAG}-dev"
+fi
+
+#
+# Build the dev image
+#
+docker buildx build --target dev \
+  $DOCKER_ARGS \
+  --cache-from=type=registry,ref=apache/superset:master-dev \
+  --cache-from=type=local,src=/tmp/superset \
+  --cache-to=type=local,ignore-error=true,dest=/tmp/superset \
+  -t "${REPO_NAME}:${SHA}-dev" \
+  -t "${REPO_NAME}:${REFSPEC}-dev" \
+  -t "${DEV_TAG}" \
+  --platform linux/amd64 \
+  --label "sha=${SHA}" \
+  --label "built_at=$(date)" \
+  --label "target=dev" \
+  --label "build_actor=${GITHUB_ACTOR}" \
+  .
+
 #
 # Build the "lean" image
 #
@@ -71,6 +131,26 @@ docker build --target lean \
   --label "build_actor=${GITHUB_ACTOR}" \
   .
 
+#
+# Build the "lean39" image
+#
+docker buildx build --target lean \
+  $DOCKER_ARGS \
+  --cache-from=type=local,src=/tmp/superset \
+  --cache-to=type=local,ignore-error=true,dest=/tmp/superset \
+  -t "${REPO_NAME}:${SHA}-py39" \
+  -t "${REPO_NAME}:${REFSPEC}-py39" \
+  -t "${REPO_NAME}:${LATEST_TAG}-py39" \
+  --platform linux/amd64 \
+  --build-arg PY_VER="3.9-slim-bullseye"\
+  --label "sha=${SHA}" \
+  --label "built_at=$(date)" \
+  --label "target=lean39" \
+  --label "build_actor=${GITHUB_ACTOR}" \
+  .
+
+
+for BUILD_PLATFORM in $ARCHITECTURE_FOR_BUILD; do
 #
 # Build the "websocket" image
 #
diff --git a/scripts/tag_latest_release.sh b/scripts/tag_latest_release.sh
index a5d50eaf9f..f2ee846a63 100755
--- a/scripts/tag_latest_release.sh
+++ b/scripts/tag_latest_release.sh
@@ -17,7 +17,7 @@
 #
 
 run_git_tag () {
-  if [ "$DRY_RUN" = "false" ] && [ "$SKIP_TAG" = "false" ]
+  if [[ "$DRY_RUN" == "false" ]] && [[ "$SKIP_TAG" == "false" ]]
   then
     git tag -a -f latest "${GITHUB_TAG_NAME}" -m "latest tag"
     echo "${GITHUB_TAG_NAME} has been tagged 'latest'"
@@ -25,7 +25,55 @@ run_git_tag () {
   exit 0
 }
 
-echo "::set-output name=SKIP_TAG::false"
+###
+# separating out git commands into functions so they can be mocked in unit tests
+###
+git_show_ref () {
+  if [[ "$TEST_ENV" == "true" ]]
+  then
+    if [[ "$GITHUB_TAG_NAME" == "does_not_exist" ]]
+        # mock return for testing only
+    then
+      echo ""
+    else
+      echo "2817aebd69dc7d199ec45d973a2079f35e5658b6 refs/tags/${GITHUB_TAG_NAME}"
+    fi
+  fi
+  result=$(git show-ref "${GITHUB_TAG_NAME}")
+  echo "${result}"
+}
+
+get_latest_tag_list () {
+  if [[ "$TEST_ENV" == "true" ]]
+  then
+    echo "(tag: 2.1.0, apache/2.1test)"
+  else
+    result=$(git show-ref --tags --dereference latest | awk '{print $2}' | xargs git show --pretty=tformat:%d -s | grep tag:)
+    echo "${result}"
+  fi
+}
+###
+
+split_string () {
+  local version="$1"
+  local delimiter="$2"
+  local components=()
+  local tmp=""
+  for (( i=0; i<${#version}; i++ )); do
+    local char="${version:$i:1}"
+    if [[ "$char" != "$delimiter" ]]; then
+      tmp="$tmp$char"
+    elif [[ -n "$tmp" ]]; then
+      components+=("$tmp")
+      tmp=""
+    fi
+  done
+  if [[ -n "$tmp" ]]; then
+    components+=("$tmp")
+  fi
+  echo "${components[@]}"
+}
+
 DRY_RUN=false
 
 # get params passed in with script when it was run
@@ -50,12 +98,14 @@ done
 
 if [ -z "${GITHUB_TAG_NAME}" ]; then
     echo "Missing tag parameter, usage: ./scripts/tag_latest_release.sh <GITHUB_TAG_NAME>"
+    echo "::set-output name=SKIP_TAG::true"
     exit 1
 fi
 
-if [ -z "$(git show-ref ${GITHUB_TAG_NAME})" ]; then
+if [ -z "$(git_show_ref)" ]; then
     echo "The tag ${GITHUB_TAG_NAME} does not exist. Please use a different tag."
-    exit 1
+    echo "::set-output name=SKIP_TAG::true"
+    exit 0
 fi
 
 # check that this tag only contains a proper semantic version
@@ -63,36 +113,41 @@ if ! [[ ${GITHUB_TAG_NAME} =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]
 then
   echo "This tag ${GITHUB_TAG_NAME} is not a valid release version. Not tagging."
   echo "::set-output name=SKIP_TAG::true"
-  exit 0
+  exit 1
 fi
 
 ## split the current GITHUB_TAG_NAME into an array at the dot
-IFS=$'.'
-THIS_TAG_NAME=(${GITHUB_TAG_NAME})  || echo 'not found'
+THIS_TAG_NAME=$(split_string "${GITHUB_TAG_NAME}" ".")
 
 # look up the 'latest' tag on git
-LATEST_TAG_LIST=$(git show-ref latest && git show --pretty=tformat:%d -s latest | grep tag:) || echo 'not found'
+LATEST_TAG_LIST=$(get_latest_tag_list) || echo 'not found'
 
 # if 'latest' tag doesn't exist, then set this commit to latest
 if [[ -z "$LATEST_TAG_LIST" ]]
 then
-  # move on to next task
   echo "there are no latest tags yet, so I'm going to start by tagging this sha as the latest"
   run_git_tag
+  exit 0
 fi
 
-## get all tags that use the same sha as the latest tag. split at comma.
-IFS=$','
-LATEST_TAGS=($LATEST_TAG_LIST)
+# remove parenthesis and tag: from the list of tags
+LATEST_TAGS_STRINGS=$(echo "$LATEST_TAG_LIST" | sed 's/tag: \([^,]*\)/\1/g' | tr -d '()')
 
-## loop over those tags and only take action on the one that isn't tagged 'latest'
-## that one will have the version number tag
-for (( i=0; i<${#LATEST_TAGS[@]}; i++ ))
+LATEST_TAGS=$(split_string "$LATEST_TAGS_STRINGS" ",")
+TAGS=($(split_string "$LATEST_TAGS" " "))
+
+# Initialize a flag for comparison result
+compare_result=""
+
+# Iterate through the tags of the latest release
+for tag in "${TAGS[@]}"
 do
-  if [[ ${LATEST_TAGS[$i]} != *"latest"* ]]
-  then
+  if [[ $tag == "latest" ]]; then
+    continue
+  else
     ## extract just the version from this tag
-    LATEST_RELEASE_TAG=$(echo "${LATEST_TAGS[$i]}" | sed -E -e 's/tag:|\(|\)|[[:space:]]*//g')
+    LATEST_RELEASE_TAG="$tag"
+    echo "LATEST_RELEASE_TAG: ${LATEST_RELEASE_TAG}"
 
     # check that this only contains a proper semantic version
     if ! [[ ${LATEST_RELEASE_TAG} =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]
@@ -101,28 +156,35 @@ do
       continue
     fi
     echo "The current release with the latest tag is version ${LATEST_RELEASE_TAG}"
-
-    ## remove the sha from the latest tag and split into an array- split at the dot
-    IFS=$'.'
-    LATEST_RELEASE_TAG_SPLIT=(${LATEST_RELEASE_TAG})
-
-    for (( j=0; j<${#THIS_TAG_NAME[@]}; j++ ))
-    do
-      ## if this value is greater than the latest release, then tag it, if it's lower, then stop, if it's
-      ## the same then move on to the next index
-      if [[ ${THIS_TAG_NAME[$j]} -gt ${LATEST_RELEASE_TAG_SPLIT[$j]} ]]
-      then
-        echo "This release tag ${GITHUB_TAG_NAME} is the latest. Tagging it"
-        run_git_tag
-
-      elif [[ ${THIS_TAG_NAME[$j]} -lt ${LATEST_RELEASE_TAG_SPLIT[$j]} ]]
-      then
-        continue
-      fi
+    # Split the version strings into arrays
+    THIS_TAG_NAME_ARRAY=($(split_string "$THIS_TAG_NAME" "."))
+    LATEST_RELEASE_TAG_ARRAY=($(split_string "$LATEST_RELEASE_TAG" "."))
+
+    # Iterate through the components of the version strings
+    for (( j=0; j<${#THIS_TAG_NAME_ARRAY[@]}; j++ )); do
+        echo "Comparing ${THIS_TAG_NAME_ARRAY[$j]} to ${LATEST_RELEASE_TAG_ARRAY[$j]}"
+        if [[ $((THIS_TAG_NAME_ARRAY[$j])) -gt $((LATEST_RELEASE_TAG_ARRAY[$j])) ]]; then
+            compare_result="greater"
+            break
+        elif [[ $((THIS_TAG_NAME_ARRAY[$j])) -lt $((LATEST_RELEASE_TAG_ARRAY[$j])) ]]; then
+            compare_result="lesser"
+            break
+        fi
     done
   fi
 done
 
-echo "This release tag ${GITHUB_TAG_NAME} is not the latest. Not tagging."
-# if you've gotten this far, then we don't want to run any tags in the next step
-echo "::set-output name=SKIP_TAG::true"
+# Determine the result based on the comparison
+if [[ -z "$compare_result" ]]; then
+    echo "Versions are equal"
+    echo "::set-output name=SKIP_TAG::true"
+elif [[ "$compare_result" == "greater" ]]; then
+    echo "This release tag ${GITHUB_TAG_NAME} is newer than the latest."
+    echo "::set-output name=SKIP_TAG::false"
+    # Add other actions you want to perform for a newer version
+elif [[ "$compare_result" == "lesser" ]]; then
+    echo "This release tag ${GITHUB_TAG_NAME} is older than the latest."
+    echo "This release tag ${GITHUB_TAG_NAME} is not the latest. Not tagging."
+    # if you've gotten this far, then we don't want to run any tags in the next step
+    echo "::set-output name=SKIP_TAG::true"
+fi
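
The loop above is a component-wise semantic-version comparison. The same decision condensed into a few lines (an illustrative sketch, not the script itself; outcomes match the unit tests below):

    # Compare dotted versions component by component, as the script does.
    def is_newer(candidate: str, latest: str) -> bool:
        return tuple(int(p) for p in candidate.split(".")) > tuple(
            int(p) for p in latest.split(".")
        )

    assert is_newer("2.1.1", "2.1.0")      # newer  -> SKIP_TAG::false
    assert not is_newer("1.0.0", "2.1.0")  # older  -> SKIP_TAG::true
    assert not is_newer("2.1.0", "2.1.0")  # equal  -> SKIP_TAG::true
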
diff --git a/tests/unit_tests/fixtures/bash_mock.py b/tests/unit_tests/fixtures/bash_mock.py
new file mode 100644
index 0000000000..91194de6bf
--- /dev/null
+++ b/tests/unit_tests/fixtures/bash_mock.py
@@ -0,0 +1,44 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import subprocess
+
+
+class BashMock:
+    @staticmethod
+    def tag_latest_release(tag):
+        bash_command = f"./scripts/tag_latest_release.sh {tag} --dry-run"
+        result = subprocess.run(
+            bash_command,
+            shell=True,
+            capture_output=True,
+            text=True,
+            env={"TEST_ENV": "true"},
+        )
+        return result
+
+    @staticmethod
+    def docker_build_push(tag, branch):
+        bash_command = f"./scripts/docker_build_push.sh {tag}"
+        result = subprocess.run(
+            bash_command,
+            shell=True,
+            capture_output=True,
+            text=True,
+            env={"TEST_ENV": "true", "GITHUB_REF": f"refs/heads/{branch}"},
+        )
+        return result
diff --git a/tests/unit_tests/scripts/docker_build_push_test.py b/tests/unit_tests/scripts/docker_build_push_test.py
new file mode 100644
index 0000000000..9f4c84318e
--- /dev/null
+++ b/tests/unit_tests/scripts/docker_build_push_test.py
@@ -0,0 +1,44 @@
+import re
+import subprocess
+from unittest import mock
+from unittest.mock import patch
+
+import pytest
+
+from tests.unit_tests.fixtures.bash_mock import BashMock
+
+original_run = subprocess.run
+
+
+def wrapped(*args, **kwargs):
+    return original_run(*args, **kwargs)
+
+
+@pytest.mark.parametrize(
+    "tag, expected_output, branch",
+    [
+        ("1.0.0", "LATEST_TAG is master", "master"),
+        ("2.1.0", "LATEST_TAG is master", "master"),
+        ("2.1.1", "LATEST_TAG is latest", "master"),
+        ("3.0.0", "LATEST_TAG is latest", "master"),
+        ("2.1.0rc1", "LATEST_TAG is 2.1.0", "2.1.0"),
+        ("", "LATEST_TAG is foo", "foo"),
+        ("2.1", "LATEST_TAG is 2.1", "2.1"),
+        ("does_not_exist", "LATEST_TAG is does-not-exist", "does_not_exist"),
+    ],
+)
+def test_docker_build_push(tag, expected_output, branch):
+    with mock.patch(
+        "tests.unit_tests.fixtures.bash_mock.subprocess.run", wraps=wrapped
+    ) as subprocess_mock:
+        result = BashMock.docker_build_push(tag, branch)
+
+        subprocess_mock.assert_called_once_with(
+            f"./scripts/docker_build_push.sh {tag}",
+            shell=True,
+            capture_output=True,
+            text=True,
+            env={"TEST_ENV": "true", "GITHUB_REF": f"refs/heads/{branch}"},
+        )
+
+        assert re.search(expected_output, result.stdout, re.MULTILINE)
diff --git a/tests/unit_tests/scripts/tag_latest_release_test.py b/tests/unit_tests/scripts/tag_latest_release_test.py
new file mode 100644
index 0000000000..7b15a1670a
--- /dev/null
+++ b/tests/unit_tests/scripts/tag_latest_release_test.py
@@ -0,0 +1,49 @@
+import subprocess
+from unittest import mock
+from unittest.mock import patch
+
+import pytest
+
+from tests.unit_tests.fixtures.bash_mock import BashMock
+
+original_run = subprocess.run
+
+
+def wrapped(*args, **kwargs):
+    return original_run(*args, **kwargs)
+
+
+@pytest.mark.parametrize(
+    "tag, expected_output",
+    [
+        ("1.0.0", "This release tag 1.0.0 is older than the latest."),
+        ("2.1.0", "Versions are equal\n::set-output name=SKIP_TAG::true"),
+        ("2.1.1", "This release tag 2.1.1 is newer than the latest."),
+        ("3.0.0", "This release tag 3.0.0 is newer than the latest."),
+        ("2.1.0rc1", "This tag 2.1.0rc1 is not a valid release version. Not tagging."),
+        (
+            "",
+            "Missing tag parameter, usage: ./scripts/tag_latest_release.sh <GITHUB_TAG_NAME>",
+        ),
+        ("2.1", "This tag 2.1 is not a valid release version. Not tagging."),
+        (
+            "does_not_exist",
+            "The tag does_not_exist does not exist. Please use a different tag.\n::set-output name=SKIP_TAG::true",
+        ),
+    ],
+)
+def test_tag_latest_release(tag, expected_output):
+    with mock.patch(
+        "tests.unit_tests.fixtures.bash_mock.subprocess.run", wraps=wrapped
+    ) as subprocess_mock:
+        result = BashMock.tag_latest_release(tag)
+
+        subprocess_mock.assert_called_once_with(
+            f"./scripts/tag_latest_release.sh {tag} --dry-run",
+            shell=True,
+            capture_output=True,
+            text=True,
+            env={"TEST_ENV": "true"},
+        )
+
+        assert expected_output in result.stdout


[superset] 04/07: chore: remove CssTemplate and Annotation access from gamma role (#24826)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 95873007bbb80547afdc585373566ca4860b8f9a
Author: Lily Kuang <li...@preset.io>
AuthorDate: Thu Aug 24 16:39:56 2023 -0700

    chore: remove CssTemplate and Annotation access from gamma role (#24826)
---
 superset/security/manager.py              | 22 +++++++++++-----------
 tests/integration_tests/security_tests.py |  3 ---
 2 files changed, 11 insertions(+), 14 deletions(-)

diff --git a/superset/security/manager.py b/superset/security/manager.py
index 6f3a4b90bf..391704c41b 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -167,8 +167,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     }
 
     GAMMA_READ_ONLY_MODEL_VIEWS = {
-        "Annotation",
-        "CssTemplate",
         "Dataset",
         "Datasource",
     } | READ_ONLY_MODEL_VIEWS
@@ -191,19 +189,21 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     } | USER_MODEL_VIEWS
 
     ALPHA_ONLY_VIEW_MENUS = {
-        "Manage",
-        "CSS Templates",
-        "Annotation Layers",
-        "Queries",
-        "Import dashboards",
-        "Upload a CSV",
-        "ReportSchedule",
         "Alerts & Report",
-        "TableSchemaView",
-        "CsvToDatabaseView",
+        "Annotation Layers",
+        "Annotation",
+        "CSS Templates",
         "ColumnarToDatabaseView",
+        "CssTemplate",
+        "CsvToDatabaseView",
         "ExcelToDatabaseView",
+        "Import dashboards",
         "ImportExportRestApi",
+        "Manage",
+        "Queries",
+        "ReportSchedule",
+        "TableSchemaView",
+        "Upload a CSV",
     }
 
     ADMIN_ONLY_PERMISSIONS = {
diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py
index c65f5a6dd8..55fc1f2dad 100644
--- a/tests/integration_tests/security_tests.py
+++ b/tests/integration_tests/security_tests.py
@@ -1345,7 +1345,6 @@ class TestRolePermission(SupersetTestCase):
         self.assert_cannot_menu("Alerts & Report", perm_set)
 
     def assert_can_gamma(self, perm_set):
-        self.assert_can_read("CssTemplate", perm_set)
         self.assert_can_read("Dataset", perm_set)
 
         # make sure that user can create slices and dashboards
@@ -1552,8 +1551,6 @@ class TestRolePermission(SupersetTestCase):
         # make sure that user can create slices and dashboards
         self.assert_can_all("Dashboard", gamma_perm_set)
         self.assert_can_read("Dataset", gamma_perm_set)
-        self.assert_can_read("Annotation", gamma_perm_set)
-        self.assert_can_read("CssTemplate", gamma_perm_set)
 
         # make sure that user can create slices and dashboards
         self.assert_can_all("Chart", gamma_perm_set)


[superset] 02/07: fix: SSH Tunnel creation with dynamic form (#24196)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 2867f057d25ab3bc08742d8a7475c72085dc384e
Author: Hugh A. Miles II <hu...@gmail.com>
AuthorDate: Sun Jul 2 23:48:48 2023 -0400

    fix: SSH Tunnel creation with dynamic form (#24196)
    
    (cherry picked from commit 226c7f807dd70239691dc3baaa4d4276a6a4f7c4)
---
 .../cypress/integration/database/modal.test.ts     |   8 +-
 superset-frontend/src/types/Database.ts            |   1 +
 .../DatabaseConnectionForm/CommonParameters.tsx    |  31 +++
 .../DatabaseModal/DatabaseConnectionForm/index.tsx |   3 +
 .../data/database/DatabaseModal/index.test.tsx     |   4 +
 .../CRUD/data/database/DatabaseModal/index.tsx     |  30 ++-
 .../src/views/CRUD/data/database/types.ts          |   1 +
 superset-frontend/src/views/CRUD/hooks.ts          | 209 +++++++++++----------
 superset/db_engine_specs/base.py                   |   4 +
 tests/integration_tests/databases/api_tests.py     |  12 ++
 .../db_engine_specs/postgres_tests.py              |   4 +
 11 files changed, 184 insertions(+), 123 deletions(-)

diff --git a/superset-frontend/cypress-base/cypress/integration/database/modal.test.ts b/superset-frontend/cypress-base/cypress/integration/database/modal.test.ts
index a3260250aa..3cc34cb64f 100644
--- a/superset-frontend/cypress-base/cypress/integration/database/modal.test.ts
+++ b/superset-frontend/cypress-base/cypress/integration/database/modal.test.ts
@@ -62,8 +62,8 @@ describe('Add database', () => {
   it('show error alerts on dynamic form for bad host', () => {
     // click postgres dynamic form
     cy.get('.preferred > :nth-child(1)').click();
-    cy.get('input[name="host"]').focus().type('badhost');
-    cy.get('input[name="port"]').focus().type('5432');
+    cy.get('input[name="host"]').focus().type('badhost', { force: true });
+    cy.get('input[name="port"]').focus().type('5432', { force: true });
     cy.get('.ant-form-item-explain-error').contains(
       "The hostname provided can't be resolved",
     );
@@ -72,8 +72,8 @@ describe('Add database', () => {
   it('show error alerts on dynamic form for bad port', () => {
     // click postgres dynamic form
     cy.get('.preferred > :nth-child(1)').click();
-    cy.get('input[name="host"]').focus().type('localhost');
-    cy.get('input[name="port"]').focus().type('123');
+    cy.get('input[name="host"]').focus().type('localhost', { force: true });
+    cy.get('input[name="port"]').focus().type('123', { force: true });
     cy.get('input[name="database"]').focus();
     cy.get('.ant-form-item-explain-error').contains('The port is closed');
   });
diff --git a/superset-frontend/src/types/Database.ts b/superset-frontend/src/types/Database.ts
index c4491dbb99..575d69e2f2 100644
--- a/superset-frontend/src/types/Database.ts
+++ b/superset-frontend/src/types/Database.ts
@@ -27,4 +27,5 @@ export default interface Database {
   server_cert: string;
   sqlalchemy_uri: string;
   catalog: object;
+  parameters: any;
 }
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx
index 99a414012b..7078b49cbc 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx
@@ -17,6 +17,7 @@
  * under the License.
  */
 import React from 'react';
+import { isEmpty } from 'lodash';
 import { SupersetTheme, t } from '@superset-ui/core';
 import { AntdSwitch } from 'src/components';
 import InfoTooltip from 'src/components/InfoTooltip';
@@ -250,3 +251,33 @@ export const forceSSLField = ({
     />
   </div>
 );
+
+export const SSHTunnelSwitch = ({
+  isEditMode,
+  changeMethods,
+  db,
+}: FieldPropTypes) => (
+  <div css={(theme: SupersetTheme) => infoTooltip(theme)}>
+    <AntdSwitch
+      disabled={isEditMode && !isEmpty(db?.ssh_tunnel)}
+      checked={db?.parameters?.ssh}
+      onChange={changed => {
+        changeMethods.onParametersChange({
+          target: {
+            type: 'toggle',
+            name: 'ssh',
+            checked: true,
+            value: changed,
+          },
+        });
+      }}
+      data-test="ssh-tunnel-switch"
+    />
+    <span css={toggleStyle}>{t('SSH Tunnel')}</span>
+    <InfoTooltip
+      tooltip={t('SSH Tunnel configuration parameters')}
+      placement="right"
+      viewBox="0 -5 24 24"
+    />
+  </div>
+);
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx
index f6284ae67d..5dce73206f 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx
@@ -31,6 +31,7 @@ import {
   portField,
   queryField,
   usernameField,
+  SSHTunnelSwitch,
 } from './CommonParameters';
 import { validatedInputField } from './ValidatedInputField';
 import { EncryptedField } from './EncryptedField';
@@ -55,6 +56,7 @@ export const FormFieldOrder = [
   'account',
   'warehouse',
   'role',
+  'ssh',
 ];
 
 export interface FieldPropTypes {
@@ -102,6 +104,7 @@ const FORM_FIELD_MAP = {
   warehouse: validatedInputField,
   role: validatedInputField,
   account: validatedInputField,
+  ssh: SSHTunnelSwitch,
 };
 
 interface DatabaseConnectionFormProps {
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.test.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.test.tsx
index 32cc16b04f..cc5eda7d06 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.test.tsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.test.tsx
@@ -127,6 +127,10 @@ fetchMock.mock(AVAILABLE_DB_ENDPOINT, {
             description: 'Additional parameters',
             type: 'object',
           },
+          ssh: {
+            description: 'Create SSH Tunnel',
+            type: 'boolean',
+          },
           username: {
             description: 'Username',
             nullable: true,
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx
index fdc41e9e22..97bd916fd6 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx
@@ -706,15 +706,18 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
         });
       }
 
-      // make sure that button spinner animates
-      setLoading(true);
-      const errors = await getValidation(dbToUpdate, true);
-      if ((validationErrors && !isEmpty(validationErrors)) || errors) {
+      // only do validation for non-SSH-tunnel connections
+      if (!dbToUpdate?.ssh_tunnel) {
+        // make sure that button spinner animates
+        setLoading(true);
+        const errors = await getValidation(dbToUpdate, true);
+        if ((validationErrors && !isEmpty(validationErrors)) || errors) {
+          setLoading(false);
+          return;
+        }
+        // end spinner animation
         setLoading(false);
-        return;
       }
-      setLoading(false);
-      // end spinner animation
 
       const parameters_schema = isEditMode
         ? dbToUpdate.parameters_schema?.properties
@@ -1431,18 +1434,7 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
         validationErrors={validationErrors}
         getPlaceholder={getPlaceholder}
       />
-      <SSHTunnelContainer>
-        <SSHTunnelSwitchComponent
-          isEditMode={isEditMode}
-          dbFetched={dbFetched}
-          disableSSHTunnelingForEngine={disableSSHTunnelingForEngine}
-          useSSHTunneling={useSSHTunneling}
-          setUseSSHTunneling={setUseSSHTunneling}
-          setDB={setDB}
-          isSSHTunneling={isSSHTunneling}
-        />
-      </SSHTunnelContainer>
-      {useSSHTunneling && (
+      {db?.parameters?.ssh && (
         <SSHTunnelContainer>{renderSSHTunnelForm()}</SSHTunnelContainer>
       )}
     </>
diff --git a/superset-frontend/src/views/CRUD/data/database/types.ts b/superset-frontend/src/views/CRUD/data/database/types.ts
index c347948f7e..a6f04c97af 100644
--- a/superset-frontend/src/views/CRUD/data/database/types.ts
+++ b/superset-frontend/src/views/CRUD/data/database/types.ts
@@ -68,6 +68,7 @@ export type DatabaseObject = {
     warehouse?: string;
     role?: string;
     account?: string;
+    ssh?: boolean;
   };
 
   // Performance
diff --git a/superset-frontend/src/views/CRUD/hooks.ts b/superset-frontend/src/views/CRUD/hooks.ts
index 80a6c4793b..8cc03b3115 100644
--- a/superset-frontend/src/views/CRUD/hooks.ts
+++ b/superset-frontend/src/views/CRUD/hooks.ts
@@ -692,123 +692,132 @@ export function useDatabaseValidation() {
     null,
   );
   const getValidation = useCallback(
-    (database: Partial<DatabaseObject> | null, onCreate = false) =>
-      SupersetClient.post({
-        endpoint: '/api/v1/database/validate_parameters/',
-        body: JSON.stringify(transformDB(database)),
-        headers: { 'Content-Type': 'application/json' },
-      })
-        .then(() => {
-          setValidationErrors(null);
+    (database: Partial<DatabaseObject> | null, onCreate = false) => {
+      if (database?.parameters?.ssh) {
+        // when ssh tunnel is enabled we don't want to render any validation errors
+        setValidationErrors(null);
+        return [];
+      }
+
+      return (
+        SupersetClient.post({
+          endpoint: '/api/v1/database/validate_parameters/',
+          body: JSON.stringify(transformDB(database)),
+          headers: { 'Content-Type': 'application/json' },
         })
-        // eslint-disable-next-line consistent-return
-        .catch(e => {
-          if (typeof e.json === 'function') {
-            return e.json().then(({ errors = [] }: JsonObject) => {
-              const parsedErrors = errors
-                .filter((error: { error_type: string }) => {
-                  const skipValidationError = ![
-                    'CONNECTION_MISSING_PARAMETERS_ERROR',
-                    'CONNECTION_ACCESS_DENIED_ERROR',
-                  ].includes(error.error_type);
-                  return skipValidationError || onCreate;
-                })
-                .reduce(
-                  (
-                    obj: {},
-                    {
-                      error_type,
-                      extra,
-                      message,
-                    }: {
-                      error_type: string;
-                      extra: {
-                        invalid?: string[];
-                        missing?: string[];
-                        name: string;
-                        catalog: {
+          .then(() => {
+            setValidationErrors(null);
+          })
+          // eslint-disable-next-line consistent-return
+          .catch(e => {
+            if (typeof e.json === 'function') {
+              return e.json().then(({ errors = [] }: JsonObject) => {
+                const parsedErrors = errors
+                  .filter((error: { error_type: string }) => {
+                    const skipValidationError = ![
+                      'CONNECTION_MISSING_PARAMETERS_ERROR',
+                      'CONNECTION_ACCESS_DENIED_ERROR',
+                    ].includes(error.error_type);
+                    return skipValidationError || onCreate;
+                  })
+                  .reduce(
+                    (
+                      obj: {},
+                      {
+                        error_type,
+                        extra,
+                        message,
+                      }: {
+                        error_type: string;
+                        extra: {
+                          invalid?: string[];
+                          missing?: string[];
                           name: string;
-                          url: string;
-                          idx: number;
+                          catalog: {
+                            name: string;
+                            url: string;
+                            idx: number;
+                          };
+                          issue_codes?: {
+                            code?: number;
+                            message?: string;
+                          }[];
                         };
-                        issue_codes?: {
-                          code?: number;
-                          message?: string;
-                        }[];
-                      };
-                      message: string;
-                    },
-                  ) => {
-                    if (extra.catalog) {
-                      if (extra.catalog.name) {
+                        message: string;
+                      },
+                    ) => {
+                      if (extra.catalog) {
+                        if (extra.catalog.name) {
+                          return {
+                            ...obj,
+                            error_type,
+                            [extra.catalog.idx]: {
+                              name: message,
+                            },
+                          };
+                        }
+                        if (extra.catalog.url) {
+                          return {
+                            ...obj,
+                            error_type,
+                            [extra.catalog.idx]: {
+                              url: message,
+                            },
+                          };
+                        }
+
                         return {
                           ...obj,
                           error_type,
                           [extra.catalog.idx]: {
                             name: message,
+                            url: message,
                           },
                         };
                       }
-                      if (extra.catalog.url) {
+                      // if extra.invalid doesn't exist then the
+                      // error can't be mapped to a parameter
+                      // so leave it alone
+                      if (extra.invalid) {
                         return {
                           ...obj,
+                          [extra.invalid[0]]: message,
                           error_type,
-                          [extra.catalog.idx]: {
-                            url: message,
-                          },
+                        };
+                      }
+                      if (extra.missing) {
+                        return {
+                          ...obj,
+                          error_type,
+                          ...Object.assign(
+                            {},
+                            ...extra.missing.map(field => ({
+                              [field]: 'This is a required field',
+                            })),
+                          ),
+                        };
+                      }
+                      if (extra.issue_codes?.length) {
+                        return {
+                          ...obj,
+                          error_type,
+                          description: message || extra.issue_codes[0]?.message,
                         };
                       }
 
-                      return {
-                        ...obj,
-                        error_type,
-                        [extra.catalog.idx]: {
-                          name: message,
-                          url: message,
-                        },
-                      };
-                    }
-                    // if extra.invalid doesn't exist then the
-                    // error can't be mapped to a parameter
-                    // so leave it alone
-                    if (extra.invalid) {
-                      return {
-                        ...obj,
-                        [extra.invalid[0]]: message,
-                        error_type,
-                      };
-                    }
-                    if (extra.missing) {
-                      return {
-                        ...obj,
-                        error_type,
-                        ...Object.assign(
-                          {},
-                          ...extra.missing.map(field => ({
-                            [field]: 'This is a required field',
-                          })),
-                        ),
-                      };
-                    }
-                    if (extra.issue_codes?.length) {
-                      return {
-                        ...obj,
-                        error_type,
-                        description: message || extra.issue_codes[0]?.message,
-                      };
-                    }
-
-                    return obj;
-                  },
-                  {},
-                );
-              setValidationErrors(parsedErrors);
-              return parsedErrors;
-            });
-          }
-          // eslint-disable-next-line no-console
-          console.error(e);
-        }),
+                      return obj;
+                    },
+                    {},
+                  );
+                setValidationErrors(parsedErrors);
+                return parsedErrors;
+              });
+            }
+            // eslint-disable-next-line no-console
+            console.error(e);
+          })
+      );
+    },
     [setValidationErrors],
   );
 
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index b789bbe70c..63a8044612 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -1773,6 +1773,10 @@ class BasicParametersSchema(Schema):
     encryption = fields.Boolean(
         required=False, description=__("Use an encrypted connection to the database")
     )
+    ssh = fields.Boolean(
+        required=False,
+        metadata={"description": __("Use an ssh tunnel connection to the database")},
+    )
 
 
 class BasicParametersType(TypedDict, total=False):
diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py
index f4968edae9..79c1319d59 100644
--- a/tests/integration_tests/databases/api_tests.py
+++ b/tests/integration_tests/databases/api_tests.py
@@ -2437,6 +2437,10 @@ class TestDatabaseApi(SupersetTestCase):
                                 "description": "Additional parameters",
                                 "type": "object",
                             },
+                            "ssh": {
+                                "description": "Use an ssh tunnel connection to the database",
+                                "type": "boolean",
+                            },
                             "username": {
                                 "description": "Username",
                                 "nullable": True,
@@ -2512,6 +2516,10 @@ class TestDatabaseApi(SupersetTestCase):
                                 "description": "Additional parameters",
                                 "type": "object",
                             },
+                            "ssh": {
+                                "description": "Use an ssh tunnel connection to the database",
+                                "type": "boolean",
+                            },
                             "username": {
                                 "description": "Username",
                                 "nullable": True,
@@ -2587,6 +2595,10 @@ class TestDatabaseApi(SupersetTestCase):
                                 "description": "Additional parameters",
                                 "type": "object",
                             },
+                            "ssh": {
+                                "description": "Use an ssh tunnel connection to the database",
+                                "type": "boolean",
+                            },
                             "username": {
                                 "description": "Username",
                                 "nullable": True,
diff --git a/tests/integration_tests/db_engine_specs/postgres_tests.py b/tests/integration_tests/db_engine_specs/postgres_tests.py
index a6145432c2..260b5a2f95 100644
--- a/tests/integration_tests/db_engine_specs/postgres_tests.py
+++ b/tests/integration_tests/db_engine_specs/postgres_tests.py
@@ -511,6 +511,10 @@ def test_base_parameters_mixin():
                 "description": "Additional parameters",
                 "additionalProperties": {},
             },
+            "ssh": {
+                "description": "Use an ssh tunnel connection to the database",
+                "type": "boolean",
+            },
         },
         "required": ["database", "host", "port", "username"],
     }
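
The hooks.ts change earlier in this commit boils down to a guard clause: when parameters.ssh is set, parameter validation is skipped entirely and no errors are rendered. The same shape as a compact sketch (Python for consistency with the other examples here; the validate callable is a hypothetical stand-in for the validate_parameters endpoint):

    def get_validation(database: dict, validate) -> list:
        """Skip parameter validation when an SSH tunnel is enabled."""
        if database.get("parameters", {}).get("ssh"):
            # ssh is handled by the tunnel form, not the parameters endpoint
            return []
        return validate(database)

    assert get_validation({"parameters": {"ssh": True}}, lambda db: ["err"]) == []
    assert get_validation({"parameters": {}}, lambda db: ["err"]) == ["err"]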


[superset] 03/07: fix: validation errors appearing after ssh tunnel switch (#24849)

Posted by el...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

elizabeth pushed a commit to branch 2.1
in repository https://gitbox.apache.org/repos/asf/superset.git

commit ad90832fd1e08d74c683a8af987826da7048faa9
Author: Hugh A. Miles II <hu...@gmail.com>
AuthorDate: Wed Aug 2 17:41:37 2023 -0400

    fix: validation errors appearing after ssh tunnel switch (#24849)
    
    (cherry picked from commit b71541fb7fb1bdfd3e1eea59ee76de1f51e67e6b)
---
 .../DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx         | 3 +++
 .../CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx | 4 ++++
 .../src/views/CRUD/data/database/DatabaseModal/index.tsx              | 2 +-
 3 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx
index 7078b49cbc..7b52eab26c 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/CommonParameters.tsx
@@ -49,6 +49,7 @@ export const hostField = ({
     onChange={changeMethods.onParametersChange}
   />
 );
+
 export const portField = ({
   required,
   changeMethods,
@@ -255,6 +256,7 @@ export const forceSSLField = ({
 export const SSHTunnelSwitch = ({
   isEditMode,
   changeMethods,
+  clearValidationErrors,
   db,
 }: FieldPropTypes) => (
   <div css={(theme: SupersetTheme) => infoTooltip(theme)}>
@@ -270,6 +272,7 @@ export const SSHTunnelSwitch = ({
             value: changed,
           },
         });
+        clearValidationErrors();
       }}
       data-test="ssh-tunnel-switch"
     />
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx
index 5dce73206f..e747b3c895 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/DatabaseConnectionForm/index.tsx
@@ -79,6 +79,7 @@ export interface FieldPropTypes {
   };
   validationErrors: JsonObject | null;
   getValidation: () => void;
+  clearValidationErrors: () => void;
   db?: DatabaseObject;
   field: string;
   isEditMode?: boolean;
@@ -132,6 +133,7 @@ interface DatabaseConnectionFormProps {
   onRemoveTableCatalog: (idx: number) => void;
   validationErrors: JsonObject | null;
   getValidation: () => void;
+  clearValidationErrors: () => void;
   getPlaceholder?: (field: string) => string | undefined;
 }
 
@@ -151,6 +153,7 @@ const DatabaseConnectionForm = ({
   onRemoveTableCatalog,
   sslForced,
   validationErrors,
+  clearValidationErrors,
 }: DatabaseConnectionFormProps) => (
   <Form>
     <div
@@ -179,6 +182,7 @@ const DatabaseConnectionForm = ({
             },
             validationErrors,
             getValidation,
+            clearValidationErrors,
             db,
             key: field,
             field,
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx
index 97bd916fd6..63c38e3e10 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.tsx
@@ -267,7 +267,6 @@ export function dbReducer(
       };
     case ActionType.extraInputChange:
       // "extra" payload in state is a string
-
       if (
         action.payload.name === 'schema_cache_timeout' ||
         action.payload.name === 'table_cache_timeout'
@@ -1433,6 +1432,7 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
         getValidation={() => getValidation(db)}
         validationErrors={validationErrors}
         getPlaceholder={getPlaceholder}
+        clearValidationErrors={() => setValidationErrors(null)}
       />
       {db?.parameters?.ssh && (
         <SSHTunnelContainer>{renderSSHTunnelForm()}</SSHTunnelContainer>