You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@superset.apache.org by mi...@apache.org on 2023/01/06 16:36:32 UTC

[superset] branch 1.5 updated (649a0b3fc1 -> 0adbf9c052)

This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a change to branch 1.5
in repository https://gitbox.apache.org/repos/asf/superset.git


    omit 649a0b3fc1 chore: Merge adjustments
     new 0adbf9c052 chore: Merge adjustments

This update added new revisions after undoing existing revisions.
That is to say, some revisions that were in the old version of the
branch are not in the new version.  This situation occurs
when a user --force pushes a change and generates a repository
containing something like this:

 * -- * -- B -- O -- O -- O   (649a0b3fc1)
            \
             N -- N -- N   refs/heads/1.5 (0adbf9c052)

You should already have received notification emails for all of the O
revisions, and so the following emails describe only the N revisions
from the common base, B.

Any revisions marked "omit" are not gone; other references still
refer to them.  Any revisions marked "discard" are gone forever.

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../cypress/integration/explore/control.test.ts    |  2 +-
 tests/integration_tests/csv_upload_tests.py        | 43 ++++++++++++----------
 2 files changed, 25 insertions(+), 20 deletions(-)


[superset] 01/01: chore: Merge adjustments

Posted by mi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch 1.5
in repository https://gitbox.apache.org/repos/asf/superset.git

commit 0adbf9c052238f906043bf73904b58233216a2a6
Author: Michael S. Molina <mi...@gmail.com>
AuthorDate: Thu Jan 5 16:03:56 2023 -0500

    chore: Merge adjustments
---
 .../cypress/integration/explore/control.test.ts    |  2 +-
 .../views/CRUD/data/database/DatabaseList.test.jsx |  2 +
 tests/integration_tests/csv_upload_tests.py        | 43 ++++++++++++----------
 tests/unit_tests/conftest.py                       |  2 +-
 4 files changed, 28 insertions(+), 21 deletions(-)

diff --git a/superset-frontend/cypress-base/cypress/integration/explore/control.test.ts b/superset-frontend/cypress-base/cypress/integration/explore/control.test.ts
index f8254ac9bb..5537b59522 100644
--- a/superset-frontend/cypress-base/cypress/integration/explore/control.test.ts
+++ b/superset-frontend/cypress-base/cypress/integration/explore/control.test.ts
@@ -122,7 +122,7 @@ describe('Test datatable', () => {
   });
   it('Data Pane opens and loads results', () => {
     cy.get('[data-test="data-tab"]').click();
-    cy.get('[data-test="row-count-label"]').contains('26 rows retrieved');
+    cy.get('[data-test="row-count-label"]').contains('25 rows retrieved');
     cy.contains('View results');
     cy.get('.ant-empty-description').should('not.exist');
   });
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseList.test.jsx b/superset-frontend/src/views/CRUD/data/database/DatabaseList.test.jsx
index 964adc64d5..b71a13ffeb 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseList.test.jsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseList.test.jsx
@@ -82,6 +82,8 @@ const mockUser = {
   userId: 1,
 };
 
+const userSelectorMock = jest.spyOn(redux, 'useSelector');
+
 fetchMock.get(databasesInfoEndpoint, {
   permissions: ['can_write'],
 });
diff --git a/tests/integration_tests/csv_upload_tests.py b/tests/integration_tests/csv_upload_tests.py
index d5da25c38c..c0a47f7d2b 100644
--- a/tests/integration_tests/csv_upload_tests.py
+++ b/tests/integration_tests/csv_upload_tests.py
@@ -205,6 +205,14 @@ def mock_upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str:
     return dest_dir
 
 
+def escaped_double_quotes(text):
+    return f"&#34;{text}&#34;"
+
+
+def escaped_parquet(text):
+    return escaped_double_quotes(f"[&#39;{text}&#39;]")
+
+
 @pytest.mark.usefixtures("setup_csv_upload")
 @pytest.mark.usefixtures("create_csv_files")
 @mock.patch(
@@ -226,11 +234,11 @@ def test_import_csv_enforced_schema(mock_event_logger):
     # no schema specified, fail upload
     resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE_W_SCHEMA, extra={"schema": None})
     assert (
-        f'Database "{CSV_UPLOAD_DATABASE}" schema "None" is not allowed for csv uploads'
+        f'Database {escaped_double_quotes(CSV_UPLOAD_DATABASE)} schema {escaped_double_quotes("None")} is not allowed for csv uploads'
         in resp
     )
 
-    success_msg = f'CSV file "{CSV_FILENAME1}" uploaded to table "{full_table_name}"'
+    success_msg = f"CSV file {escaped_double_quotes(CSV_FILENAME1)} uploaded to table {escaped_double_quotes(full_table_name)}"
     resp = upload_csv(
         CSV_FILENAME1,
         CSV_UPLOAD_TABLE_W_SCHEMA,
@@ -255,7 +263,7 @@ def test_import_csv_enforced_schema(mock_event_logger):
         CSV_FILENAME1, CSV_UPLOAD_TABLE_W_SCHEMA, extra={"schema": "gold"}
     )
     assert (
-        f'Database "{CSV_UPLOAD_DATABASE}" schema "gold" is not allowed for csv uploads'
+        f'Database {escaped_double_quotes(CSV_UPLOAD_DATABASE)} schema {escaped_double_quotes("gold")} is not allowed for csv uploads'
         in resp
     )
 
@@ -283,7 +291,10 @@ def test_import_csv_explore_database(setup_csv_upload, create_csv_files):
         pytest.skip("Sqlite doesn't support schema / database creation")
 
     resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE_W_EXPLORE)
-    assert f'CSV file "{CSV_FILENAME1}" uploaded to table "{full_table_name}"' in resp
+    assert (
+        f"CSV file {escaped_double_quotes(CSV_FILENAME1)} uploaded to table {escaped_double_quotes(full_table_name)}"
+        in resp
+    )
     table = SupersetTestCase.get_table(name=CSV_UPLOAD_TABLE_W_EXPLORE)
     assert table.database_id == superset.utils.database.get_example_database().id
 
@@ -295,7 +306,7 @@ def test_import_csv_explore_database(setup_csv_upload, create_csv_files):
 def test_import_csv(mock_event_logger):
     schema = utils.get_example_default_schema()
     full_table_name = f"{schema}.{CSV_UPLOAD_TABLE}" if schema else CSV_UPLOAD_TABLE
-    success_msg_f1 = f'CSV file "{CSV_FILENAME1}" uploaded to table "{full_table_name}"'
+    success_msg_f1 = f"CSV file {escaped_double_quotes(CSV_FILENAME1)} uploaded to table {escaped_double_quotes(full_table_name)}"
 
     test_db = get_upload_db()
 
@@ -304,9 +315,7 @@ def test_import_csv(mock_event_logger):
     assert success_msg_f1 in resp
 
     # upload again with fail mode; should fail
-    fail_msg = (
-        f'Unable to upload CSV file "{CSV_FILENAME1}" to table "{CSV_UPLOAD_TABLE}"'
-    )
+    fail_msg = f"Unable to upload CSV file {escaped_double_quotes(CSV_FILENAME1)} to table {escaped_double_quotes(CSV_UPLOAD_TABLE)}"
     resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE)
     assert fail_msg in resp
 
@@ -341,14 +350,12 @@ def test_import_csv(mock_event_logger):
 
     # try to append to table from file with different schema
     resp = upload_csv(CSV_FILENAME2, CSV_UPLOAD_TABLE, extra={"if_exists": "append"})
-    fail_msg_f2 = (
-        f'Unable to upload CSV file "{CSV_FILENAME2}" to table "{CSV_UPLOAD_TABLE}"'
-    )
+    fail_msg_f2 = f"Unable to upload CSV file {escaped_double_quotes(CSV_FILENAME2)} to table {escaped_double_quotes(CSV_UPLOAD_TABLE)}"
     assert fail_msg_f2 in resp
 
     # replace table from file with different schema
     resp = upload_csv(CSV_FILENAME2, CSV_UPLOAD_TABLE, extra={"if_exists": "replace"})
-    success_msg_f2 = f'CSV file "{CSV_FILENAME2}" uploaded to table "{full_table_name}"'
+    success_msg_f2 = f"CSV file {escaped_double_quotes(CSV_FILENAME2)} uploaded to table {escaped_double_quotes(full_table_name)}"
     assert success_msg_f2 in resp
 
     table = SupersetTestCase.get_table(name=CSV_UPLOAD_TABLE)
@@ -388,7 +395,7 @@ def test_import_excel(mock_event_logger):
     full_table_name = f"{schema}.{EXCEL_UPLOAD_TABLE}" if schema else EXCEL_UPLOAD_TABLE
     test_db = get_upload_db()
 
-    success_msg = f'Excel file "{EXCEL_FILENAME}" uploaded to table "{full_table_name}"'
+    success_msg = f"Excel file {escaped_double_quotes(EXCEL_FILENAME)} uploaded to table {escaped_double_quotes(full_table_name)}"
 
     # initial upload with fail mode
     resp = upload_excel(EXCEL_FILENAME, EXCEL_UPLOAD_TABLE)
@@ -405,7 +412,7 @@ def test_import_excel(mock_event_logger):
     assert security_manager.find_user("admin") in table.owners
 
     # upload again with fail mode; should fail
-    fail_msg = f'Unable to upload Excel file "{EXCEL_FILENAME}" to table "{EXCEL_UPLOAD_TABLE}"'
+    fail_msg = f"Unable to upload Excel file {escaped_double_quotes(EXCEL_FILENAME)} to table {escaped_double_quotes(EXCEL_UPLOAD_TABLE)}"
     resp = upload_excel(EXCEL_FILENAME, EXCEL_UPLOAD_TABLE)
     assert fail_msg in resp
 
@@ -451,14 +458,14 @@ def test_import_parquet(mock_event_logger):
     )
     test_db = get_upload_db()
 
-    success_msg_f1 = f'Columnar file "[\'{PARQUET_FILENAME1}\']" uploaded to table "{full_table_name}"'
+    success_msg_f1 = f"Columnar file {escaped_parquet(PARQUET_FILENAME1)} uploaded to table {escaped_double_quotes(full_table_name)}"
 
     # initial upload with fail mode
     resp = upload_columnar(PARQUET_FILENAME1, PARQUET_UPLOAD_TABLE)
     assert success_msg_f1 in resp
 
     # upload again with fail mode; should fail
-    fail_msg = f'Unable to upload Columnar file "[\'{PARQUET_FILENAME1}\']" to table "{PARQUET_UPLOAD_TABLE}"'
+    fail_msg = f"Unable to upload Columnar file {escaped_parquet(PARQUET_FILENAME1)} to table {escaped_double_quotes(PARQUET_UPLOAD_TABLE)}"
     resp = upload_columnar(PARQUET_FILENAME1, PARQUET_UPLOAD_TABLE)
     assert fail_msg in resp
 
@@ -507,9 +514,7 @@ def test_import_parquet(mock_event_logger):
     resp = upload_columnar(
         ZIP_FILENAME, PARQUET_UPLOAD_TABLE, extra={"if_exists": "replace"}
     )
-    success_msg_f2 = (
-        f'Columnar file "[\'{ZIP_FILENAME}\']" uploaded to table "{full_table_name}"'
-    )
+    success_msg_f2 = f"Columnar file {escaped_parquet(ZIP_FILENAME)} uploaded to table {escaped_double_quotes(full_table_name)}"
     assert success_msg_f2 in resp
 
     data = (
diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py
index 43c4ac69ad..7676111ea0 100644
--- a/tests/unit_tests/conftest.py
+++ b/tests/unit_tests/conftest.py
@@ -88,7 +88,7 @@ def client(app: SupersetApp) -> Any:
         yield client
 
 
-@pytest.fixture
+@pytest.fixture(autouse=True)
 def app_context(app: SupersetApp) -> Iterator[None]:
     """
     A fixture that yields an application context.