You are viewing a plain text version of this content. The canonical HTML version of this message is available at its permalink in the mailing-list archive.
Posted to commits@superset.apache.org by hu...@apache.org on 2022/08/05 01:37:55 UTC
[superset] branch master updated: fix: for series limit comparison on explore with chart = Query (#20989)
This is an automated email from the ASF dual-hosted git repository.
hugh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git
The following commit(s) were added to refs/heads/master by this push:
new be4fbaf951 fix: for series limit comparison on explore with chart = Query (#20989)
be4fbaf951 is described below
commit be4fbaf951e3ef2ae510410c20ee3991ec1531ba
Author: Hugh A. Miles II <hu...@gmail.com>
AuthorDate: Thu Aug 4 21:37:45 2022 -0400
fix: for series limit comparison on explore with chart = Query (#20989)
* oops
* 1 more
---
superset/common/query_context_processor.py | 13 +++++++------
1 file changed, 7 insertions(+), 6 deletions(-)
diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py
index 9d7f8305e5..19d78e0b35 100644
--- a/superset/common/query_context_processor.py
+++ b/superset/common/query_context_processor.py
@@ -43,6 +43,7 @@ from superset.exceptions import (
)
from superset.extensions import cache_manager, security_manager
from superset.models.helpers import QueryResult
+from superset.models.sql_lab import Query
from superset.utils import csv
from superset.utils.cache import generate_cache_key, set_and_log_cache
from superset.utils.core import (
@@ -185,10 +186,6 @@ class QueryContextProcessor:
# a valid assumption for current setting. In the long term, we may
# support multiple queries from different data sources.
- # The datasource here can be different backend but the interface is common
- # pylint: disable=import-outside-toplevel
- from superset.models.sql_lab import Query
-
query = ""
if isinstance(query_context.datasource, Query):
# todo(hugh): add logic to manage all sip68 models here
@@ -248,7 +245,7 @@ class QueryContextProcessor:
return df
- def processing_time_offsets( # pylint: disable=too-many-locals
+ def processing_time_offsets( # pylint: disable=too-many-locals,too-many-statements
self,
df: pd.DataFrame,
query_object: QueryObject,
@@ -307,7 +304,11 @@ class QueryContextProcessor:
}
join_keys = [col for col in df.columns if col not in metrics_mapping.keys()]
- result = self._qc_datasource.query(query_object_clone_dct)
+ if isinstance(self._qc_datasource, Query):
+ result = self._qc_datasource.exc_query(query_object_clone_dct)
+ else:
+ result = self._qc_datasource.query(query_object_clone_dct)
+
queries.append(result.query)
cache_keys.append(None)