You are viewing a plain text version of this content. The canonical link for it is available in the original HTML version of this archived message.
Posted to commits@superset.apache.org by vi...@apache.org on 2022/04/04 12:36:51 UTC
[superset] 01/01: fix tests and lint
This is an automated email from the ASF dual-hosted git repository.
villebro pushed a commit to branch lts-v1
in repository https://gitbox.apache.org/repos/asf/superset.git
commit 7771501437124d5f2c54f1674f95abd9ea9167e6
Author: Ville Brofeldt <vi...@gmail.com>
AuthorDate: Mon Apr 4 15:34:11 2022 +0300
fix tests and lint
---
superset/common/query_object_factory.py | 2 +-
superset/sql_parse.py | 1 -
tests/integration_tests/sqla_models_tests.py | 47 ----------------------------
3 files changed, 1 insertion(+), 49 deletions(-)
diff --git a/superset/common/query_object_factory.py b/superset/common/query_object_factory.py
index 0066346d8a..d38c114e1d 100644
--- a/superset/common/query_object_factory.py
+++ b/superset/common/query_object_factory.py
@@ -79,7 +79,7 @@ class QueryObjectFactory: # pylint: disable=too-few-public-methods
str(datasource["type"]), int(datasource["id"]), self._session_maker()
)
- def _process_extras( # pylint: disable=no-self-use
+ def _process_extras(
self,
extras: Optional[Dict[str, Any]],
) -> Dict[str, Any]:
diff --git a/superset/sql_parse.py b/superset/sql_parse.py
index 6bfb63c425..e3b2e7c196 100644
--- a/superset/sql_parse.py
+++ b/superset/sql_parse.py
@@ -574,7 +574,6 @@ def get_rls_for_table(
return None
template_processor = dataset.get_template_processor()
- # pylint: disable=protected-access
predicate = " AND ".join(
str(filter_)
for filter_ in dataset.get_sqla_row_level_filters(template_processor)
diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py
index d8c9c070c9..5f3d3cfe81 100644
--- a/tests/integration_tests/sqla_models_tests.py
+++ b/tests/integration_tests/sqla_models_tests.py
@@ -658,53 +658,6 @@ def test_filter_on_text_column(text_column_table):
assert result_object.df["count"][0] == 1
-def test_should_generate_closed_and_open_time_filter_range():
- with app.app_context():
- if backend() != "postgresql":
- pytest.skip(f"{backend()} has different dialect for datetime column")
-
- table = SqlaTable(
- table_name="temporal_column_table",
- sql=(
- "SELECT '2021-12-31'::timestamp as datetime_col "
- "UNION SELECT '2022-01-01'::timestamp "
- "UNION SELECT '2022-03-10'::timestamp "
- "UNION SELECT '2023-01-01'::timestamp "
- "UNION SELECT '2023-03-10'::timestamp "
- ),
- database=get_example_database(),
- )
- TableColumn(
- column_name="datetime_col",
- type="TIMESTAMP",
- table=table,
- is_dttm=True,
- )
- SqlMetric(metric_name="count", expression="count(*)", table=table)
- result_object = table.query(
- {
- "metrics": ["count"],
- "is_timeseries": False,
- "filter": [],
- "from_dttm": datetime(2022, 1, 1),
- "to_dttm": datetime(2023, 1, 1),
- "granularity": "datetime_col",
- }
- )
- """ >>> result_object.query
- SELECT count(*) AS count
- FROM
- (SELECT '2021-12-31'::timestamp as datetime_col
- UNION SELECT '2022-01-01'::timestamp
- UNION SELECT '2022-03-10'::timestamp
- UNION SELECT '2023-01-01'::timestamp
- UNION SELECT '2023-03-10'::timestamp) AS virtual_table
- WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
- AND datetime_col < TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
- """
- assert result_object.df.iloc[0]["count"] == 2
-
-
@pytest.mark.parametrize(
"row,dimension,result",
[