Posted to notifications@superset.apache.org by GitBox <gi...@apache.org> on 2018/12/13 19:02:23 UTC

[GitHub] conglei closed pull request #6380: [SIP5]Implemented get_payload for query_context.

URL: https://github.com/apache/incubator-superset/pull/6380
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

diff --git a/superset/assets/src/chart/chartAction.js b/superset/assets/src/chart/chartAction.js
index 6c6ddab2fb..c4a81b60b8 100644
--- a/superset/assets/src/chart/chartAction.js
+++ b/superset/assets/src/chart/chartAction.js
@@ -156,7 +156,7 @@ export function runQuery(formData, force = false, timeout = 60, key) {
 
     let querySettings = {
       url,
-      postPayload: { form_data: payload },
+      postPayload: { ...payload },
       signal,
       timeout: timeout * 1000,
     };
diff --git a/superset/assets/src/explore/actions/saveModalActions.js b/superset/assets/src/explore/actions/saveModalActions.js
index 1993a382a9..ac9c1be076 100644
--- a/superset/assets/src/explore/actions/saveModalActions.js
+++ b/superset/assets/src/explore/actions/saveModalActions.js
@@ -52,7 +52,7 @@ export function saveSlice(formData, requestParams) {
       requestParams,
     });
 
-    return SupersetClient.post({ url, postPayload: { form_data: payload } })
+    return SupersetClient.post({ url, postPayload: { ...payload } })
       .then(({ json }) => dispatch(saveSliceSuccess(json)))
       .catch(() => dispatch(saveSliceFailed()));
   };
diff --git a/superset/assets/src/explore/components/DisplayQueryButton.jsx b/superset/assets/src/explore/components/DisplayQueryButton.jsx
index 042ca24d83..9601b2ca2b 100644
--- a/superset/assets/src/explore/components/DisplayQueryButton.jsx
+++ b/superset/assets/src/explore/components/DisplayQueryButton.jsx
@@ -60,7 +60,7 @@ export default class DisplayQueryButton extends React.PureComponent {
     });
     SupersetClient.post({
       url,
-      postPayload: { form_data: payload },
+      postPayload: { ...payload },
     })
       .then(({ json }) => {
         this.setState({
diff --git a/superset/assets/src/explore/components/EmbedCodeButton.jsx b/superset/assets/src/explore/components/EmbedCodeButton.jsx
index ff28fbbfeb..23529dfd16 100644
--- a/superset/assets/src/explore/components/EmbedCodeButton.jsx
+++ b/superset/assets/src/explore/components/EmbedCodeButton.jsx
@@ -31,7 +31,10 @@ export default class EmbedCodeButton extends React.Component {
   generateEmbedHTML() {
     const srcLink = (
       window.location.origin +
-      getExploreLongUrl(this.props.latestQueryFormData, 'standalone') +
+      getExploreLongUrl({
+        formData: this.props.latestQueryFormData,
+        endpointType: 'standalone',
+      }) +
       `&height=${this.state.height}`
     );
     return (
diff --git a/superset/assets/src/explore/components/ExploreActionButtons.jsx b/superset/assets/src/explore/components/ExploreActionButtons.jsx
index 6bb059c66b..7e413cdb9b 100644
--- a/superset/assets/src/explore/components/ExploreActionButtons.jsx
+++ b/superset/assets/src/explore/components/ExploreActionButtons.jsx
@@ -28,7 +28,7 @@ export default function ExploreActionButtons({
     <div className="btn-group results" role="group">
       {latestQueryFormData &&
         <URLShortLinkButton
-          url={getExploreLongUrl(latestQueryFormData)}
+          url={getExploreLongUrl({ formData: latestQueryFormData })}
           emailSubject="Superset Chart"
           emailContent="Check out this chart: "
         />
diff --git a/superset/assets/src/explore/components/ExploreViewContainer.jsx b/superset/assets/src/explore/components/ExploreViewContainer.jsx
index 162cb16333..b6f22f9670 100644
--- a/superset/assets/src/explore/components/ExploreViewContainer.jsx
+++ b/superset/assets/src/explore/components/ExploreViewContainer.jsx
@@ -171,7 +171,7 @@ class ExploreViewContainer extends React.Component {
 
   addHistory({ isReplace = false, title }) {
     const { payload } = getExploreUrlAndPayload({ formData: this.props.form_data });
-    const longUrl = getExploreLongUrl(this.props.form_data);
+    const longUrl = getExploreLongUrl({ formData: this.props.form_data, forceExplore: true });
     try {
       if (isReplace) {
         history.replaceState(payload, title, longUrl);
diff --git a/superset/assets/src/explore/exploreUtils.js b/superset/assets/src/explore/exploreUtils.js
index 4eac3abce4..658eddf3be 100644
--- a/superset/assets/src/explore/exploreUtils.js
+++ b/superset/assets/src/explore/exploreUtils.js
@@ -1,5 +1,6 @@
 /* eslint camelcase: 0 */
 import URI from 'urijs';
+import { getChartBuildQueryRegistry } from '@superset-ui/chart';
 import { availableDomains } from '../utils/hostNamesConfig';
 
 export function getChartKey(explore) {
@@ -31,22 +32,34 @@ export function getAnnotationJsonUrl(slice_id, form_data, isNative) {
     }).toString();
 }
 
-export function getURIDirectory(formData, endpointType = 'base') {
+export function getURIDirectory({
+  formData,
+  endpointType = 'base',
+  forceExplore,
+}) {
   // Building the directory part of the URI
   let directory = '/superset/explore/';
   if (['json', 'csv', 'query', 'results', 'samples'].indexOf(endpointType) >= 0) {
     directory = '/superset/explore_json/';
   }
+  // TODO: replace this hard-coded viz_type check with a lookup in the
+  // chart build-query registry once more viz types register a buildQuery.
+  if (formData.viz_type === 'word_cloud' && !forceExplore) {
+    directory = '/api/v1/query/';
+  }
   return directory;
 }
 
-export function getExploreLongUrl(formData, endpointType) {
+export function getExploreLongUrl({
+  formData,
+  endpointType,
+  forceExplore = false,
+}) {
   if (!formData.datasource) {
     return null;
   }
 
   const uri = new URI('/');
-  const directory = getURIDirectory(formData, endpointType);
+  const directory = getURIDirectory({ formData, endpointType, forceExplore });
   const search = uri.search(true);
   search.form_data = JSON.stringify(formData);
   if (endpointType === 'standalone') {
@@ -81,7 +94,7 @@ export function getExploreUrlAndPayload({
     uri = URI(URI(curUrl).search());
   }
 
-  const directory = getURIDirectory(formData, endpointType);
+  const directory = getURIDirectory({ formData, endpointType });
 
   // Building the querystring (search) part of the URI
   const search = uri.search(true);
@@ -115,7 +128,12 @@ export function getExploreUrlAndPayload({
     });
   }
   uri = uri.search(search).directory(directory);
-  const payload = { ...formData };
+  let payload = { form_data: { ...formData } };
+
+  const buildQuery = getChartBuildQueryRegistry().get(formData.viz_type);
+  if (buildQuery) {
+    payload = { query_context: buildQuery(formData) };
+  }
 
   return {
     url: uri.toString(),
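
With this change, the payload posted by the client is no longer always
wrapped in form_data: charts whose viz_type has a registered buildQuery
post a query_context instead. A minimal sketch of the two shapes (all
field values below are made up for illustration, not taken from this PR):

    # Legacy path: viz types without a buildQuery still post form_data.
    legacy_payload = {
        'form_data': {
            'viz_type': 'table',
            'datasource': '3__table',
        },
    }

    # New path: viz types with a buildQuery (word_cloud here) post a
    # query_context, which superset/views/api.py below deserializes
    # into a QueryContext.
    query_context_payload = {
        'query_context': {
            'datasource': {'type': 'table', 'id': 3},
            'queries': [{
                'granularity': 'ds',
                'groupby': ['name'],
                'metrics': [{'label': 'sum__num'}],
                'time_range': 'Last week',
            }],
        },
    }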
diff --git a/superset/assets/src/visualizations/deckgl/Multi/Multi.jsx b/superset/assets/src/visualizations/deckgl/Multi/Multi.jsx
index 50dfa4bbab..0669e0b9b3 100644
--- a/superset/assets/src/visualizations/deckgl/Multi/Multi.jsx
+++ b/superset/assets/src/visualizations/deckgl/Multi/Multi.jsx
@@ -49,7 +49,7 @@ class DeckMulti extends React.PureComponent {
       };
 
       SupersetClient.get({
-          endpoint: getExploreLongUrl(subsliceCopy.form_data, 'json'),
+          endpoint: getExploreLongUrl({ formData: subsliceCopy.form_data, endpointType: 'json' }),
         })
         .then(({ json }) => {
           const layer = layerGenerators[subsliceCopy.form_data.viz_type](
diff --git a/superset/assets/src/visualizations/nvd3/LineMulti/LineMulti.jsx b/superset/assets/src/visualizations/nvd3/LineMulti/LineMulti.jsx
index 525a0f0944..3f5aff4350 100644
--- a/superset/assets/src/visualizations/nvd3/LineMulti/LineMulti.jsx
+++ b/superset/assets/src/visualizations/nvd3/LineMulti/LineMulti.jsx
@@ -81,7 +81,7 @@ class LineMulti extends React.Component {
         time_range: timeRange,
       };
       const addPrefix = prefixMetricWithSliceName;
-      return getJson(getExploreLongUrl(combinedFormData, 'json'))
+      return getJson(getExploreLongUrl({ formData: combinedFormData, endpointType: 'json' }))
         .then(data => data.map(({ key, values }) => ({
           key: addPrefix ? `${subslice.slice_name}: ${key}` : key,
           type: combinedFormData.viz_type,
diff --git a/superset/assets/src/visualizations/wordcloud/FormData.ts b/superset/assets/src/visualizations/wordcloud/FormData.ts
index 55f81313af..a2f85ea582 100644
--- a/superset/assets/src/visualizations/wordcloud/FormData.ts
+++ b/superset/assets/src/visualizations/wordcloud/FormData.ts
@@ -3,6 +3,8 @@ import { FormData as GenericFormData } from 'src/query';
 // FormData specific to the wordcloud viz
 interface WordCloudFormData {
   series: string;
+  metric: string;
+  time_range: string;
 }
 
 // FormData for wordcloud contains both common properties of all form data
diff --git a/superset/assets/src/visualizations/wordcloud/buildQuery.ts b/superset/assets/src/visualizations/wordcloud/buildQuery.ts
index 2aa0f2cd75..feee0dee94 100644
--- a/superset/assets/src/visualizations/wordcloud/buildQuery.ts
+++ b/superset/assets/src/visualizations/wordcloud/buildQuery.ts
@@ -6,5 +6,6 @@ export default function buildQuery(formData: FormData) {
   return buildQueryContext(formData, (baseQueryObject) => [{
     ...baseQueryObject,
     groupby: [formData.series],
+    time_range: formData.time_range,
   }]);
 }
diff --git a/superset/assets/src/visualizations/wordcloud/transformProps.js b/superset/assets/src/visualizations/wordcloud/transformProps.js
index ef824e9f6b..183c63c373 100644
--- a/superset/assets/src/visualizations/wordcloud/transformProps.js
+++ b/superset/assets/src/visualizations/wordcloud/transformProps.js
@@ -1,12 +1,14 @@
 function transformData(data, formData) {
   const { metric, series } = formData;
+  if (metric && series) {
+    const transformedData = data.map(datum => ({
+      text: datum[series],
+      size: datum[metric.label || metric],
+    }));
 
-  const transformedData = data.map(datum => ({
-    text: datum[series],
-    size: datum[metric.label || metric],
-  }));
-
-  return transformedData;
+    return transformedData;
+  }
+  return [];
 }
 
 export default function transformProps(chartProps) {
@@ -21,7 +23,7 @@ export default function transformProps(chartProps) {
   return {
     width,
     height,
-    data: transformData(payload.data, formData),
+    data: transformData(payload[0].data, formData),
     colorScheme,
     rotation,
     sizeRange: [sizeFrom, sizeTo],
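
The payload[0].data change reflects that the new endpoint returns a list
with one payload per query object, so the chart reads the first entry. An
illustrative sketch of the response shape the word cloud now consumes
(keys follow the get_df_payload implementation in query_context.py below;
the row values are made up):

    example_response = [
        {
            'data': [{'name': 'alice', 'sum__num': 10}],
            'status': 'success',
            'is_cached': False,
            'rowcount': 1,
        },
    ]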
diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index 21b0dac368..f0005c3701 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -1,27 +1,244 @@
-# pylint: disable=R
+# pylint: disable=C,R,W
+from datetime import datetime, timedelta
+import logging
+import pickle as pkl
+import traceback
 from typing import Dict, List
 
+import numpy as np
+import pandas as pd
+
+from superset import app, cache
 from superset import db
 from superset.connectors.connector_registry import ConnectorRegistry
+from superset.utils import core as utils
+from superset.utils.core import DTTM_ALIAS
 from .query_object import QueryObject
 
+config = app.config
+stats_logger = config.get('STATS_LOGGER')
+
 
 class QueryContext:
     """
     The query context contains the query object and additional fields necessary
     to retrieve the data payload for a given viz.
     """
+
+    default_fillna = 0
+    cache_type = 'df'
+    enforce_numerical_metrics = True
+
     # TODO: Type datasource and query_object dictionary with TypedDict when it becomes
     # a vanilla python type https://github.com/python/mypy/issues/5288
     def __init__(
             self,
             datasource: Dict,
             queries: List[Dict],
+            force: bool = False,
+            custom_cache_timeout: int = None,
     ):
         self.datasource = ConnectorRegistry.get_datasource(datasource.get('type'),
                                                            int(datasource.get('id')),
                                                            db.session)
         self.queries = list(map(lambda query_obj: QueryObject(**query_obj), queries))
 
-    def get_data(self):
-        raise NotImplementedError()
+        self.force = force
+
+        self.custom_cache_timeout = custom_cache_timeout
+
+    def get_query_result(self, query_object):
+        """Returns a pandas dataframe based on the query object"""
+
+        # Here, we assume that all the queries will use the same datasource,
+        # which is a valid assumption for the current setting. In the long
+        # term, we may or may not support multiple queries from different
+        # data sources.
+
+        timestamp_format = None
+        if self.datasource.type == 'table':
+            dttm_col = self.datasource.get_col(query_object.granularity)
+            if dttm_col:
+                timestamp_format = dttm_col.python_date_format
+
+        # The datasource here can be different backend but the interface is common
+        result = self.datasource.query(query_object.to_dict())
+
+        df = result.df
+        # Transform the timestamp we received from the database to a pandas-
+        # supported datetime format. If no python_date_format is specified,
+        # the pattern is assumed to be the default ISO date format. If the
+        # datetime format is epoch-based, the corresponding parsing logic
+        # is used.
+        if df is not None and not df.empty:
+            if DTTM_ALIAS in df.columns:
+                if timestamp_format in ('epoch_s', 'epoch_ms'):
+                    # Column has already been formatted as a timestamp.
+                    df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(pd.Timestamp)
+                else:
+                    df[DTTM_ALIAS] = pd.to_datetime(
+                        df[DTTM_ALIAS], utc=False, format=timestamp_format)
+                if self.datasource.offset:
+                    df[DTTM_ALIAS] += timedelta(hours=self.datasource.offset)
+                df[DTTM_ALIAS] += query_object.time_shift
+
+            if self.enforce_numerical_metrics:
+                self.df_metrics_to_num(df, query_object)
+
+            df = df.replace([np.inf, -np.inf], np.nan)
+            df = self.handle_nulls(df)
+        return {
+            'query': result.query,
+            'status': result.status,
+            'error_message': result.error_message,
+            'df': df,
+        }
+
+    def df_metrics_to_num(self, df, query_object):
+        """Converts metrics to numeric when pandas.read_sql cannot"""
+        # Metrics are dicts here (see QueryObject), so compare against
+        # their labels rather than the dicts themselves.
+        metric_names = [utils.get_metric_name(metric)
+                        for metric in query_object.metrics]
+        for col, dtype in df.dtypes.items():
+            if dtype.type == np.object_ and col in metric_names:
+                df[col] = pd.to_numeric(df[col], errors='coerce')
+
+    def handle_nulls(self, df):
+        fillna = self.get_fillna_for_columns(df.columns)
+        return df.fillna(fillna)
+
+    def get_fillna_for_col(self, col):
+        """Returns the value to use as filler for a specific Column.type"""
+        if col and col.is_string:
+            return ' NULL'
+        return self.default_fillna
+
+    def get_fillna_for_columns(self, columns=None):
+        """Returns a dict or scalar that can be passed to DataFrame.fillna"""
+        if columns is None:
+            return self.default_fillna
+        columns_dict = {col.column_name: col for col in self.datasource.columns}
+        fillna = {
+            c: self.get_fillna_for_col(columns_dict.get(c))
+            for c in columns
+        }
+        return fillna
+
+    def get_data(self, df):
+        return df.to_dict(orient='records')
+
+    def get_single_payload(self, query_obj):
+        """Returns a payload of metadata and data"""
+        payload = self.get_df_payload(query_obj)
+        df = payload.get('df')
+        status = payload.get('status')
+        if status != utils.QueryStatus.FAILED:
+            if df is not None and df.empty:
+                payload['error'] = 'No data'
+            else:
+                payload['data'] = self.get_data(df)
+        if 'df' in payload:
+            del payload['df']
+        return payload
+
+    def get_payload(self):
+        """Returns a payload for each query object in the query context"""
+        return [self.get_single_payload(query_object) for query_object in self.queries]
+
+    @property
+    def cache_timeout(self):
+        if self.custom_cache_timeout is not None:
+            return self.custom_cache_timeout
+        if self.datasource.cache_timeout is not None:
+            return self.datasource.cache_timeout
+        if (
+                hasattr(self.datasource, 'database') and
+                self.datasource.database.cache_timeout is not None):
+            return self.datasource.database.cache_timeout
+        return config.get('CACHE_DEFAULT_TIMEOUT')
+
+    def get_df_payload(self, query_obj):
+        """Handles caching around the df paylod retrieval"""
+        cache_key = query_obj.cache_key(
+            datasource=self.datasource.uid) if query_obj else None
+        logging.info('Cache key: {}'.format(cache_key))
+        is_loaded = False
+        stacktrace = None
+        df = None
+        cached_dttm = datetime.utcnow().isoformat().split('.')[0]
+        cache_value = None
+        status = None
+        query = ''
+        error_message = None
+        if cache_key and cache and not self.force:
+            cache_value = cache.get(cache_key)
+            if cache_value:
+                stats_logger.incr('loaded_from_cache')
+                try:
+                    cache_value = pkl.loads(cache_value)
+                    df = cache_value['df']
+                    query = cache_value['query']
+                    status = utils.QueryStatus.SUCCESS
+                    is_loaded = True
+                except Exception as e:
+                    logging.exception(e)
+                    logging.error('Error reading cache: ' +
+                                  utils.error_msg_from_exception(e))
+                logging.info('Serving from cache')
+
+        if query_obj and not is_loaded:
+            try:
+                query_result = self.get_query_result(query_obj)
+                status = query_result['status']
+                query = query_result['query']
+                error_message = query_result['error_message']
+                df = query_result['df']
+                if status != utils.QueryStatus.FAILED:
+                    stats_logger.incr('loaded_from_source')
+                    is_loaded = True
+            except Exception as e:
+                logging.exception(e)
+                if not error_message:
+                    error_message = '{}'.format(e)
+                status = utils.QueryStatus.FAILED
+                stacktrace = traceback.format_exc()
+
+            if (
+                    is_loaded and
+                    cache_key and
+                    cache and
+                    status != utils.QueryStatus.FAILED):
+                try:
+                    cache_value = dict(
+                        dttm=cached_dttm,
+                        df=df,
+                        query=query,
+                    )
+                    # Keep cache_value as the dict so the cached_dttm lookup
+                    # in the return statement below still works; only the
+                    # serialized bytes go to the cache backend.
+                    cache_binary = pkl.dumps(
+                        cache_value, protocol=pkl.HIGHEST_PROTOCOL)
+
+                    logging.info('Caching {} chars at key {}'.format(
+                        len(cache_binary), cache_key))
+
+                    stats_logger.incr('set_cache_key')
+                    cache.set(
+                        cache_key,
+                        cache_binary,
+                        timeout=self.cache_timeout)
+                except Exception as e:
+                    # cache.set call can fail if the backend is down or if
+                    # the key is too large or whatever other reasons
+                    logging.warning('Could not cache key {}'.format(cache_key))
+                    logging.exception(e)
+                    cache.delete(cache_key)
+        return {
+            'cache_key': cache_key,
+            'cached_dttm': cache_value['dttm'] if cache_value is not None else None,
+            'cache_timeout': self.cache_timeout,
+            'df': df,
+            'error': error_message,
+            'is_cached': cache_key is not None,
+            'query': query,
+            'status': status,
+            'stacktrace': stacktrace,
+            'rowcount': len(df.index) if df is not None else 0,
+        }
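
As a usage sketch: once a QueryContext is constructed, get_payload drives
the whole cache-then-query flow above. The datasource id/type and query
fields below are made up for illustration, and a Flask app context with a
registered datasource is assumed:

    from superset.common.query_context import QueryContext

    query_context = QueryContext(
        datasource={'type': 'table', 'id': 3},
        queries=[{
            'granularity': 'ds',
            'groupby': ['name'],
            'metrics': [{'label': 'sum__num'}],
            'time_range': 'Last week',
        }],
    )

    # One payload dict per query object, each with data rows plus cache
    # metadata (cache_key, is_cached, status, query, rowcount, ...).
    for payload in query_context.get_payload():
        print(payload['status'], payload['rowcount'])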
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index 8116d269c4..edbc460f0b 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -1,13 +1,15 @@
 # pylint: disable=R
+import hashlib
 from typing import Dict, List, Optional
 
+import simplejson as json
+
 from superset import app
 from superset.utils import core as utils
 
+
 # TODO: Type Metrics dictionary with TypedDict when it becomes a vanilla python type
 # https://github.com/python/mypy/issues/5288
-Metric = Dict
-
 
 class QueryObject:
     """
@@ -17,31 +19,76 @@ class QueryObject:
     def __init__(
             self,
             granularity: str,
-            groupby: List[str] = None,
-            metrics: List[Metric] = None,
+            groupby: List[str],
+            metrics: List[Dict],
             filters: List[str] = None,
             time_range: Optional[str] = None,
             time_shift: Optional[str] = None,
             is_timeseries: bool = False,
             row_limit: int = app.config.get('ROW_LIMIT'),
             limit: int = 0,
-            timeseries_limit_metric: Optional[Metric] = None,
+            timeseries_limit_metric: Optional[Dict] = None,
             order_desc: bool = True,
             extras: Optional[Dict] = None,
     ):
         self.granularity = granularity
         self.from_dttm, self.to_dttm = utils.get_since_until(time_range, time_shift)
         self.is_timeseries = is_timeseries
-        self.groupby = groupby or []
-        self.metrics = metrics or []
-        self.filter = filters or []
+        self.time_range = time_range
+        self.time_shift = time_shift
+        self.groupby = groupby
+        self.metrics = metrics
         self.row_limit = row_limit
+        self.filter = filters if filters is not None else []
         self.timeseries_limit = int(limit)
         self.timeseries_limit_metric = timeseries_limit_metric
         self.order_desc = order_desc
         self.prequeries = []
         self.is_prequery = False
-        self.extras = extras
+        self.extras = extras if extras is not None else {}
 
     def to_dict(self):
-        raise NotImplementedError()
+        query_object_dict = {
+            'granularity': self.granularity,
+            'from_dttm': self.from_dttm,
+            'to_dttm': self.to_dttm,
+            'is_timeseries': self.is_timeseries,
+            'groupby': self.groupby,
+            'metrics': self.metrics,
+            'row_limit': self.row_limit,
+            'filter': self.filter,
+            'timeseries_limit': self.timeseries_limit,
+            'timeseries_limit_metric': self.timeseries_limit_metric,
+            'order_desc': self.order_desc,
+            'prequeries': self.prequeries,
+            'is_prequery': self.is_prequery,
+            'extras': self.extras,
+        }
+        query_object_dict.update(self.extras)
+        return query_object_dict
+
+    def cache_key(self, **extra):
+        """
+        The cache key is made out of the key/values of the query object, plus
+        any other key/values in `extra`.
+        We remove datetime bounds that are hard values, and replace them with
+        the user-provided inputs to the bounds, which may be time-relative
+        (as in "5 days ago" or "now").
+        """
+        cache_dict = self.to_dict()
+        cache_dict.update(extra)
+
+        for k in ['from_dttm', 'to_dttm']:
+            del cache_dict[k]
+
+        cache_dict['time_range'] = self.time_range
+        json_data = self.json_dumps(cache_dict, sort_keys=True)
+        return hashlib.md5(json_data.encode('utf-8')).hexdigest()
+
+    def json_dumps(self, obj, sort_keys=False):
+        return json.dumps(
+            obj,
+            default=utils.json_int_dttm_ser,
+            ignore_nan=True,
+            sort_keys=sort_keys,
+        )
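
A small sketch of why the hard datetime bounds are dropped from the key:
two query objects built for the same relative time_range hash to the same
key even though from_dttm/to_dttm are computed at construction time, so
cache entries survive across requests. Field values are illustrative, and
an app context is assumed for the config defaults:

    from superset.common.query_object import QueryObject

    kwargs = dict(granularity='ds', groupby=['name'],
                  metrics=[{'label': 'sum__num'}], time_range='Last week')
    key_a = QueryObject(**kwargs).cache_key(datasource='3__table')
    key_b = QueryObject(**kwargs).cache_key(datasource='3__table')
    assert key_a == key_b  # hard bounds are excluded from the hash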
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index cf22add628..1b0e137859 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -566,7 +566,7 @@ def get_sqla_query(  # sqla
 
         cols = {col.column_name: col for col in self.columns}
         metrics_dict = {m.metric_name: m for m in self.metrics}
-
         if not granularity and is_timeseries:
             raise Exception(_(
                 'Datetime column not provided as part table configuration '
@@ -577,10 +577,10 @@ def get_sqla_query(  # sqla
         for m in metrics:
             if utils.is_adhoc_metric(m):
                 metrics_exprs.append(self.adhoc_metric_to_sqla(m, cols))
-            elif m in metrics_dict:
-                metrics_exprs.append(metrics_dict.get(m).get_sqla_col())
+            elif m['label'] in metrics_dict:
+                metrics_exprs.append(metrics_dict.get(m['label']).get_sqla_col())
             else:
-                raise Exception(_("Metric '{}' is not valid".format(m)))
+                raise Exception(_("Metric '{}' is not valid".format(m['label'])))
         if metrics_exprs:
             main_metric_expr = metrics_exprs[0]
         else:
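
Note the shape change here: saved metrics now arrive as dicts carrying a
'label' rather than bare name strings, while adhoc metrics keep their
'expressionType'. An illustrative sketch of the two shapes this code now
distinguishes (values are made up):

    # Saved metric: resolved against metrics_dict via its label.
    saved_metric = {'label': 'sum__num'}

    # Adhoc metric: detected by is_adhoc_metric via 'expressionType'.
    adhoc_metric = {
        'expressionType': 'SIMPLE',
        'column': {'column_name': 'num'},
        'aggregate': 'SUM',
        'label': 'SUM(num)',
    }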
diff --git a/superset/utils/core.py b/superset/utils/core.py
index bd23d0776c..76567c65bd 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -856,6 +856,7 @@ def get_main_database(session):
 def is_adhoc_metric(metric):
     return (
         isinstance(metric, dict) and
+        'expressionType' in metric and
         (
             (
                 metric['expressionType'] == ADHOC_METRIC_EXPRESSION_TYPES['SIMPLE'] and
@@ -872,7 +873,7 @@ def is_adhoc_metric(metric):
 
 
 def get_metric_name(metric):
-    return metric['label'] if is_adhoc_metric(metric) else metric
+    return metric['label']
 
 
 def get_metric_names(metrics):
diff --git a/superset/views/api.py b/superset/views/api.py
index 5f684f3cf9..e5a9462779 100644
--- a/superset/views/api.py
+++ b/superset/views/api.py
@@ -1,14 +1,16 @@
 # pylint: disable=R
 import json
 
-from flask import g, request
+from flask import request
 from flask_appbuilder import expose
 from flask_appbuilder.security.decorators import has_access_api
 
-from superset import appbuilder, security_manager
+from superset import appbuilder, db, security_manager
 from superset.common.query_context import QueryContext
+from superset.legacy import update_time_range
+import superset.models.core as models
 from superset.models.core import Log
-from .base import api, BaseSupersetView, data_payload_response, handle_api_exception
+from .base import api, BaseSupersetView, handle_api_exception
 
 
 class Api(BaseSupersetView):
@@ -23,9 +25,31 @@ def query(self):
         for the given query_obj.
         """
         query_context = QueryContext(**json.loads(request.form.get('query_context')))
-        security_manager.assert_datasource_permission(query_context.datasource, g.user)
-        payload_json = query_context.get_data()
-        return data_payload_response(payload_json)
+        security_manager.assert_datasource_permission(query_context.datasource)
+        payload_json = query_context.get_payload()
+        return json.dumps(payload_json)
+
+    @Log.log_this
+    @api
+    @handle_api_exception
+    @has_access_api
+    @expose('/v1/form_data/', methods=['GET'])
+    def query_form_data(self):
+        """
+        Takes a query_obj constructed in the client and returns payload data response
+        for the given query_obj.
+        """
+        form_data = {}
+        slice_id = request.args.get('slice_id')
+        if slice_id:
+            slc = db.session.query(models.Slice).filter_by(id=slice_id).one_or_none()
+            if slc:
+                form_data = slc.form_data.copy()
+
+        update_time_range(form_data)
+
+        return json.dumps(form_data)
 
 
 appbuilder.add_view_no_menu(Api)
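
A hypothetical end-to-end call against the new endpoint, tying the pieces
together. The host and query_context body are made up for illustration,
authentication is elided, and the requests library is assumed:

    import json
    import requests

    query_context = {
        'datasource': {'type': 'table', 'id': 3},
        'queries': [{
            'granularity': 'ds',
            'groupby': ['name'],
            'metrics': [{'label': 'sum__num'}],
            'time_range': 'Last week',
        }],
    }

    # The endpoint reads query_context from the POSTed form data.
    resp = requests.post(
        'http://localhost:8088/api/v1/query/',
        data={'query_context': json.dumps(query_context)},
    )
    payloads = resp.json()  # one payload dict per query object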


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: notifications-unsubscribe@superset.apache.org
For additional commands, e-mail: notifications-help@superset.apache.org