Posted to commits@superset.apache.org by jo...@apache.org on 2019/01/11 02:22:18 UTC

[incubator-superset] 14/43: fix invalid name

This is an automated email from the ASF dual-hosted git repository.

johnbodley pushed a commit to branch feature--embeddable-charts-pilot
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git

commit 14939d42ae33f2d2f939fe4190d164d7d602790b
Author: Conglei Shi <co...@airbnb.com>
AuthorDate: Thu Nov 15 00:23:08 2018 -0800

    fix invalid name
---
 superset/common/query_context.py | 19 ++++++++++---------
 superset/common/query_object.py  |  1 +
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index c8a2328..e13e263 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -81,7 +81,7 @@ class QueryContext:
                         df[DTTM_ALIAS], utc=False, format=timestamp_format)
                 if self.datasource.offset:
                     df[DTTM_ALIAS] += timedelta(hours=self.datasource.offset)
-                df[DTTM_ALIAS] += self.time_shift
+                df[DTTM_ALIAS] += query_object.time_shift
 
             if self.enforce_numerical_metrics:
                 self.df_metrics_to_num(df, query_object)
@@ -95,16 +95,16 @@ class QueryContext:
             'df': df,
         }
 
-    def df_metrics_to_num(self, df, query_object):
+    def df_metrics_to_num(self, data_frame, query_object):
         """Converting metrics to numeric when pandas.read_sql cannot"""
         metrics = [metric.label for metric in query_object.metrics]
-        for col, dtype in df.dtypes.items():
+        for col, dtype in data_frame.dtypes.items():
             if dtype.type == np.object_ and col in metrics:
-                df[col] = pd.to_numeric(df[col], errors='coerce')
+                data_frame[col] = pd.to_numeric(data_frame[col], errors='coerce')
 
-    def handle_nulls(self, df):
+    def handle_nulls(self, data_frame):
         fillna = self.get_fillna_for_columns(df.columns)
-        return df.fillna(fillna)
+        return data_frame.fillna(fillna)
 
     def get_fillna_for_col(self, col):
         """Returns the value to use as filler for a specific Column.type"""
@@ -124,14 +124,15 @@ class QueryContext:
         }
         return fillna
 
-    def get_data(self, df):
-        return df.to_dict(orient='records')
+    def get_data(self, data_frame):
+        return data_frame.to_dict(orient='records')
 
     def get_single_payload(self, query_obj):
         """Returns a payload of metadata and data"""
         payload = self.get_df_payload(query_obj)
         df = payload.get('df')
-        if self.status != utils.QueryStatus.FAILED:
+        status = payload.get('status')
+        if status != utils.QueryStatus.FAILED:
             if df is not None and df.empty:
                 payload['error'] = 'No data'
             else:
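
The renames above touch helpers that operate on the pandas DataFrame built from the query result. As a rough standalone sketch of what the renamed df_metrics_to_num and handle_nulls do (with the new parameter name used throughout), assuming get_fillna_for_columns simply maps every column to 0 (its real body lies outside these hunks) and using made-up sample data:

    import numpy as np
    import pandas as pd

    def get_fillna_for_columns(columns):
        # Hypothetical stand-in for QueryContext.get_fillna_for_columns,
        # whose real body is not shown in this commit: fill every column with 0.
        return {col: 0 for col in columns}

    def df_metrics_to_num(data_frame, metric_labels):
        # Coerce object-typed metric columns to numeric when pandas.read_sql cannot.
        for col, dtype in data_frame.dtypes.items():
            if dtype.type == np.object_ and col in metric_labels:
                data_frame[col] = pd.to_numeric(data_frame[col], errors='coerce')

    def handle_nulls(data_frame):
        # Fill nulls using the per-column defaults.
        fillna = get_fillna_for_columns(data_frame.columns)
        return data_frame.fillna(fillna)

    # Sample data: the metric column arrives as strings, one of them unparseable.
    df = pd.DataFrame({'sum__num': ['1', '2', 'oops'], 'gender': ['boy', 'girl', 'girl']})
    df_metrics_to_num(df, metric_labels=['sum__num'])
    print(handle_nulls(df))  # 'oops' becomes NaN via errors='coerce', then 0

In the class itself these are methods and the metric labels come from query_object.metrics; the free functions here only keep the sketch self-contained.
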
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index b1dc063..9e38263 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -35,6 +35,7 @@ class QueryObject:
         self.from_dttm, self.to_dttm = utils.get_since_until(time_range, time_shift)
         self.is_timeseries = is_timeseries
         self.time_range = time_range
+        self.time_shift = time_shift
         self.groupby = groupby
         self.metrics = metrics
         self.row_limit = row_limit
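
Storing time_shift on QueryObject is what lets the first hunk read query_object.time_shift instead of the undefined self.time_shift on QueryContext (presumably the "invalid name" in the commit title). A minimal sketch of the shift being applied, assuming time_shift has already been resolved to a timedelta by this point (the parsing via utils.get_since_until is not shown in this commit):

    from datetime import timedelta

    import pandas as pd

    DTTM_ALIAS = '__timestamp'  # alias Superset uses for the time column

    class QueryObject:
        def __init__(self, time_shift=timedelta(0)):
            # Kept on the instance so QueryContext can read it later.
            self.time_shift = time_shift

    query_object = QueryObject(time_shift=timedelta(weeks=1))

    df = pd.DataFrame({DTTM_ALIAS: pd.to_datetime(['2018-11-01', '2018-11-08'])})
    df[DTTM_ALIAS] += query_object.time_shift  # the fixed line from the first hunk
    print(df[DTTM_ALIAS])  # both timestamps move forward by one week
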