You are viewing a plain text version of this content. The canonical link for it is here.
Posted to notifications@superset.apache.org by GitBox <gi...@apache.org> on 2018/06/22 01:12:13 UTC

[GitHub] mistercrunch closed pull request #4696: [WiP] Refactor SQL Lab views into `views/sql_lab.py`

mistercrunch closed pull request #4696: [WiP] Refactor SQL Lab views into `views/sql_lab.py`
URL: https://github.com/apache/incubator-superset/pull/4696
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (coming from a fork), the diff is
supplied below, since GitHub does not display it after the merge:

diff --git a/UPDATING.MD b/UPDATING.MD
index 7fb3e852ec..5db50bb0da 100644
--- a/UPDATING.MD
+++ b/UPDATING.MD
@@ -3,6 +3,10 @@
 This file documents any backwards-incompatible changes in Superset and
 assists people when migrating to a new version.
 
+## Superset 0.25.0
+
+* [4565](https://github.com/apache/incubator-superset/pull/4565)
+
 ## Superset 0.23.0
 
-* [4565](https://github.com/apache/incubator-superset/pull/4565)
\ No newline at end of file
+* [4565](https://github.com/apache/incubator-superset/pull/4565)
diff --git a/superset/assets/javascripts/SqlLab/actions.js b/superset/assets/javascripts/SqlLab/actions.js
index 04a9a5eae4..c562503882 100644
--- a/superset/assets/javascripts/SqlLab/actions.js
+++ b/superset/assets/javascripts/SqlLab/actions.js
@@ -409,8 +409,7 @@ export function createDatasourceStarted() {
   return { type: CREATE_DATASOURCE_STARTED };
 }
 export function createDatasourceSuccess(response) {
-  const data = JSON.parse(response);
-  const datasource = `${data.table_id}__table`;
+  const datasource = `${response.table_id}__table`;
   return { type: CREATE_DATASOURCE_SUCCESS, datasource };
 }
 export function createDatasourceFailed(err) {
@@ -423,7 +422,7 @@ export function createDatasource(vizOptions, context) {
 
     return $.ajax({
       type: 'POST',
-      url: '/superset/sqllab_viz/',
+      url: '/sqllab/sqllab_viz/',
       async: false,
       data: {
         data: JSON.stringify(vizOptions),
diff --git a/superset/assets/javascripts/SqlLab/components/App.jsx b/superset/assets/javascripts/SqlLab/components/App.jsx
index 3698a2a258..1328837b22 100644
--- a/superset/assets/javascripts/SqlLab/components/App.jsx
+++ b/superset/assets/javascripts/SqlLab/components/App.jsx
@@ -44,7 +44,8 @@ class App extends React.PureComponent {
     const tabsHeight = tabsEl.length > 0 ? tabsEl.outerHeight() : searchHeaderHeight;
     const warningHeight = warningEl.length > 0 ? warningEl.outerHeight() : 0;
     const alertHeight = alertEl.length > 0 ? alertEl.outerHeight() : 0;
-    return `${window.innerHeight - headerHeight - tabsHeight - warningHeight - alertHeight}px`;
+    const northHeight = headerHeight + tabsHeight + warningHeight + alertHeight;
+    return `${Math.round(window.innerHeight - northHeight)}px`;
   }
   handleResize() {
     this.setState({ contentHeight: this.getHeight() });
diff --git a/superset/assets/javascripts/SqlLab/components/QuerySearch.jsx b/superset/assets/javascripts/SqlLab/components/QuerySearch.jsx
index a3e4bf4dfd..0829e9d60d 100644
--- a/superset/assets/javascripts/SqlLab/components/QuerySearch.jsx
+++ b/superset/assets/javascripts/SqlLab/components/QuerySearch.jsx
@@ -13,7 +13,7 @@ const $ = window.$ = require('jquery');
 
 const propTypes = {
   actions: PropTypes.object.isRequired,
-  height: PropTypes.number.isRequired,
+  height: PropTypes.string.isRequired,
 };
 
 class QuerySearch extends React.PureComponent {
@@ -136,7 +136,7 @@ class QuerySearch extends React.PureComponent {
       this.state.to ? `to=${this.getTimeFromSelection(this.state.to)}` : '',
     ];
 
-    const url = this.insertParams('/superset/search_queries', params);
+    const url = this.insertParams('/sqllab/search_queries/', params);
     $.getJSON(url, (data, status) => {
       if (status === 'success') {
         this.setState({ queriesArray: data, queriesLoading: false });
diff --git a/superset/assets/javascripts/SqlLab/components/VisualizeModal.jsx b/superset/assets/javascripts/SqlLab/components/VisualizeModal.jsx
index 1c4315ff91..80727d10d9 100644
--- a/superset/assets/javascripts/SqlLab/components/VisualizeModal.jsx
+++ b/superset/assets/javascripts/SqlLab/components/VisualizeModal.jsx
@@ -32,6 +32,7 @@ const propTypes = {
   query: PropTypes.object,
   show: PropTypes.bool,
   datasource: PropTypes.string,
+  schema: PropTypes.string,
   errorMessage: PropTypes.string,
   timeout: PropTypes.number,
 };
@@ -48,6 +49,7 @@ class VisualizeModal extends React.PureComponent {
       chartType: CHART_TYPES[0],
       datasourceName: this.datasourceName(),
       columns: this.getColumnFromProps(),
+      schema: props.query ? props.query.schema : null,
       hints: [],
     };
   }
@@ -127,6 +129,7 @@ class VisualizeModal extends React.PureComponent {
     return {
       chartType: this.state.chartType.value,
       datasourceName: this.state.datasourceName,
+      schema: this.state.schema,
       columns: this.state.columns,
       sql: this.props.query.sql,
       dbId: this.props.query.dbId,
@@ -153,10 +156,9 @@ class VisualizeModal extends React.PureComponent {
     this.props.actions.createDatasource(this.buildVizOptions(), this)
       .done((resp) => {
         const columns = Object.keys(this.state.columns).map(k => this.state.columns[k]);
-        const data = JSON.parse(resp);
         const mainGroupBy = columns.filter(d => d.is_dim)[0];
         const formData = {
-          datasource: `${data.table_id}__table`,
+          datasource: `${resp.table_id}__table`,
           viz_type: this.state.chartType.value,
           since: '100 years ago',
           limit: '0',
diff --git a/superset/assets/spec/javascripts/sqllab/VisualizeModal_spec.jsx b/superset/assets/spec/javascripts/sqllab/VisualizeModal_spec.jsx
index 6c9fc5b1ed..0e1bd37985 100644
--- a/superset/assets/spec/javascripts/sqllab/VisualizeModal_spec.jsx
+++ b/superset/assets/spec/javascripts/sqllab/VisualizeModal_spec.jsx
@@ -269,6 +269,7 @@ describe('VisualizeModal', () => {
       chartType: wrapper.state().chartType.value,
       datasourceName: wrapper.state().datasourceName,
       columns: wrapper.state().columns,
+      schema: 'test_schema',
       sql: wrapper.instance().props.query.sql,
       dbId: wrapper.instance().props.query.dbId,
     });
@@ -307,7 +308,6 @@ describe('VisualizeModal', () => {
     let datasourceSpy;
     beforeEach(() => {
       ajaxSpy = sinon.spy($, 'ajax');
-      sinon.stub(JSON, 'parse').callsFake(() => ({ table_id: 107 }));
       sinon.stub(exploreUtils, 'getExploreUrlAndPayload').callsFake(() => ({ url: 'mockURL', payload: { datasource: '107__table' } }));
       sinon.spy(exploreUtils, 'exportChart');
       sinon.stub(wrapper.instance(), 'buildVizOptions').callsFake(() => (mockOptions));
@@ -315,7 +315,6 @@ describe('VisualizeModal', () => {
     });
     afterEach(() => {
       ajaxSpy.restore();
-      JSON.parse.restore();
       exploreUtils.getExploreUrlAndPayload.restore();
       exploreUtils.exportChart.restore();
       wrapper.instance().buildVizOptions.restore();
@@ -328,13 +327,13 @@ describe('VisualizeModal', () => {
 
       const spyCall = ajaxSpy.getCall(0);
       expect(spyCall.args[0].type).to.equal('POST');
-      expect(spyCall.args[0].url).to.equal('/superset/sqllab_viz/');
+      expect(spyCall.args[0].url).to.equal('/sqllab/sqllab_viz/');
       expect(spyCall.args[0].data.data).to.equal(JSON.stringify(mockOptions));
     });
     it('should open new window', () => {
       datasourceSpy.callsFake(() => {
         const d = $.Deferred();
-        d.resolve('done');
+        d.resolveWith('done', [{ table_id: 107 }]);
         return d.promise();
       });
       wrapper.setProps({ actions: { createDatasource: datasourceSpy } });
diff --git a/superset/assets/spec/javascripts/sqllab/fixtures.js b/superset/assets/spec/javascripts/sqllab/fixtures.js
index 5f9df8d08c..2fef334ef7 100644
--- a/superset/assets/spec/javascripts/sqllab/fixtures.js
+++ b/superset/assets/spec/javascripts/sqllab/fixtures.js
@@ -197,7 +197,7 @@ export const queries = [
     rows: 42,
     endDttm: 1476910566798,
     limit_reached: false,
-    schema: null,
+    schema: 'test_schema',
     errorMessage: null,
     db: 'main',
     user: 'admin',
diff --git a/superset/security.py b/superset/security.py
index e342b034d9..f878d28485 100644
--- a/superset/security.py
+++ b/superset/security.py
@@ -352,6 +352,7 @@ def is_sql_lab_pvm(self, pvm):
         return (
             pvm.view_menu.name in {
                 'SQL Lab', 'SQL Editor', 'Query Search', 'Saved Queries',
+                'SqlLab',
             } or
             pvm.permission.name in {
                 'can_sql_json', 'can_csv', 'can_search_queries', 'can_sqllab_viz',
diff --git a/superset/views/base.py b/superset/views/base.py
index 328789f0db..e6c42f5d8f 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -10,7 +10,8 @@
 import logging
 import traceback
 
-from flask import abort, flash, g, get_flashed_messages, redirect, Response
+from flask import (
+    abort, flash, g, get_flashed_messages, redirect, request, Response)
 from flask_appbuilder import BaseView, ModelView
 from flask_appbuilder.actions import action
 from flask_appbuilder.models.sqla.filters import BaseFilter
@@ -40,6 +41,10 @@ def get_error_msg():
     return error_msg
 
 
+def json_success(json_msg, status=200):
+    return Response(json_msg, status=status, mimetype='application/json')
+
+
 def json_error_response(msg=None, status=500, stacktrace=None, payload=None):
     if not payload:
         payload = {'error': str(msg)}
@@ -85,7 +90,30 @@ def get_user_roles():
     return g.user.roles
 
 
+def get_database_access_error_msg(database_name):
+    return __('This view requires the database %(name)s or '
+              '`all_datasource_access` permission', name=database_name)
+
+
+def get_datasource_access_error_msg(datasource_name):
+    return __('This endpoint requires the datasource %(name)s, database or '
+              '`all_datasource_access` permission', name=datasource_name)
+
+
 class BaseSupersetView(BaseView):
+
+    def json_response(self, obj, status=200):
+        return Response(
+            json.dumps(obj, default=utils.json_int_dttm_ser),
+            status=status,
+            mimetype='application/json')
+
+    def redirect_with_context(self, url):
+        """Redirect that indicates sending the same data"""
+        if request.query_string:
+            url += '?' + request.query_string
+        return redirect(url, code=307)
+
     def common_bootsrap_payload(self):
         """Common data always sent to the client"""
         messages = get_flashed_messages(with_categories=True)
diff --git a/superset/views/core.py b/superset/views/core.py
index f0956e359e..28bc23f9b3 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -33,7 +33,7 @@
 from werkzeug.utils import secure_filename
 
 from superset import (
-    app, appbuilder, cache, db, results_backend, security_manager, sql_lab, utils,
+    app, appbuilder, cache, db, results_backend, security_manager, utils,
     viz,
 )
 from superset.connectors.connector_registry import ConnectorRegistry
@@ -43,14 +43,15 @@
 from superset.legacy import cast_form_data
 import superset.models.core as models
 from superset.models.sql_lab import Query
-from superset.sql_parse import SupersetQuery
 from superset.utils import (
     has_access, merge_extra_filters, merge_request_params, QueryStatus,
 )
 from .base import (
     api, BaseSupersetView, CsvResponse, DeleteMixin,
+    get_datasource_access_error_msg,
     generate_download_headers, get_error_msg, get_user_roles,
-    json_error_response, SupersetFilter, SupersetModelView, YamlExportMixin,
+    json_error_response, json_success,
+    SupersetFilter, SupersetModelView, YamlExportMixin,
 )
 from .utils import bootstrap_user_data
 
@@ -84,20 +85,6 @@
     ]
 
 
-def get_database_access_error_msg(database_name):
-    return __('This view requires the database %(name)s or '
-              '`all_datasource_access` permission', name=database_name)
-
-
-def get_datasource_access_error_msg(datasource_name):
-    return __('This endpoint requires the datasource %(name)s, database or '
-              '`all_datasource_access` permission', name=datasource_name)
-
-
-def json_success(json_msg, status=200):
-    return Response(json_msg, status=status, mimetype='application/json')
-
-
 def is_owner(obj, user):
     """ Check if user is owner of the slice """
     return obj and user in obj.owners
@@ -760,12 +747,6 @@ def msg(self):
 
 class Superset(BaseSupersetView):
     """The base views for Superset!"""
-    def json_response(self, obj, status=200):
-        return Response(
-            json.dumps(obj, default=utils.json_int_dttm_ser),
-            status=status,
-            mimetype='application/json')
-
     @has_access_api
     @expose('/datasources/')
     def datasources(self):
@@ -2144,67 +2125,6 @@ def sync_druid_source(self):
             return json_error_response(utils.error_msg_from_exception(e))
         return Response(status=201)
 
-    @has_access
-    @expose('/sqllab_viz/', methods=['POST'])
-    @log_this
-    def sqllab_viz(self):
-        SqlaTable = ConnectorRegistry.sources['table']
-        data = json.loads(request.form.get('data'))
-        table_name = data.get('datasourceName')
-        table = (
-            db.session.query(SqlaTable)
-            .filter_by(table_name=table_name)
-            .first()
-        )
-        if not table:
-            table = SqlaTable(table_name=table_name)
-        table.database_id = data.get('dbId')
-        q = SupersetQuery(data.get('sql'))
-        table.sql = q.stripped()
-        db.session.add(table)
-        cols = []
-        dims = []
-        metrics = []
-        for column_name, config in data.get('columns').items():
-            is_dim = config.get('is_dim', False)
-            SqlaTable = ConnectorRegistry.sources['table']
-            TableColumn = SqlaTable.column_class
-            SqlMetric = SqlaTable.metric_class
-            col = TableColumn(
-                column_name=column_name,
-                filterable=is_dim,
-                groupby=is_dim,
-                is_dttm=config.get('is_date', False),
-                type=config.get('type', False),
-            )
-            cols.append(col)
-            if is_dim:
-                dims.append(col)
-            agg = config.get('agg')
-            if agg:
-                if agg == 'count_distinct':
-                    metrics.append(SqlMetric(
-                        metric_name='{agg}__{column_name}'.format(**locals()),
-                        expression='COUNT(DISTINCT {column_name})'
-                        .format(**locals()),
-                    ))
-                else:
-                    metrics.append(SqlMetric(
-                        metric_name='{agg}__{column_name}'.format(**locals()),
-                        expression='{agg}({column_name})'.format(**locals()),
-                    ))
-        if not metrics:
-            metrics.append(SqlMetric(
-                metric_name='count'.format(**locals()),
-                expression='count(*)'.format(**locals()),
-            ))
-        table.columns = cols
-        table.metrics = metrics
-        db.session.commit()
-        return self.json_response(json.dumps({
-            'table_id': table.id,
-        }))
-
     @has_access
     @expose('/table/<database_id>/<table_name>/<schema>/')
     @log_this
@@ -2354,110 +2274,6 @@ def stop_query(self):
             pass
         return self.json_response('OK')
 
-    @has_access_api
-    @expose('/sql_json/', methods=['POST', 'GET'])
-    @log_this
-    def sql_json(self):
-        """Runs arbitrary sql and returns and json"""
-        async = request.form.get('runAsync') == 'true'
-        sql = request.form.get('sql')
-        database_id = request.form.get('database_id')
-        schema = request.form.get('schema') or None
-        template_params = json.loads(
-            request.form.get('templateParams') or '{}')
-
-        session = db.session()
-        mydb = session.query(models.Database).filter_by(id=database_id).first()
-
-        if not mydb:
-            json_error_response(
-                'Database with id {} is missing.'.format(database_id))
-
-        rejected_tables = security_manager.rejected_datasources(sql, mydb, schema)
-        if rejected_tables:
-            return json_error_response(get_datasource_access_error_msg(
-                '{}'.format(rejected_tables)))
-        session.commit()
-
-        select_as_cta = request.form.get('select_as_cta') == 'true'
-        tmp_table_name = request.form.get('tmp_table_name')
-        if select_as_cta and mydb.force_ctas_schema:
-            tmp_table_name = '{}.{}'.format(
-                mydb.force_ctas_schema,
-                tmp_table_name,
-            )
-
-        query = Query(
-            database_id=int(database_id),
-            limit=int(app.config.get('SQL_MAX_ROW', None)),
-            sql=sql,
-            schema=schema,
-            select_as_cta=request.form.get('select_as_cta') == 'true',
-            start_time=utils.now_as_float(),
-            tab_name=request.form.get('tab'),
-            status=QueryStatus.PENDING if async else QueryStatus.RUNNING,
-            sql_editor_id=request.form.get('sql_editor_id'),
-            tmp_table_name=tmp_table_name,
-            user_id=int(g.user.get_id()),
-            client_id=request.form.get('client_id'),
-        )
-        session.add(query)
-        session.flush()
-        query_id = query.id
-        session.commit()  # shouldn't be necessary
-        if not query_id:
-            raise Exception(_('Query record was not created as expected.'))
-        logging.info('Triggering query_id: {}'.format(query_id))
-
-        # Async request.
-        if async:
-            logging.info('Running query on a Celery worker')
-            # Ignore the celery future object and the request may time out.
-            try:
-                sql_lab.get_sql_results.delay(
-                    query_id=query_id, return_results=False,
-                    store_results=not query.select_as_cta,
-                    user_name=g.user.username,
-                    template_params=template_params)
-            except Exception as e:
-                logging.exception(e)
-                msg = (
-                    'Failed to start remote query on a worker. '
-                    'Tell your administrator to verify the availability of '
-                    'the message queue.'
-                )
-                query.status = QueryStatus.FAILED
-                query.error_message = msg
-                session.commit()
-                return json_error_response('{}'.format(msg))
-
-            resp = json_success(json.dumps(
-                {'query': query.to_dict()}, default=utils.json_int_dttm_ser,
-                allow_nan=False), status=202)
-            session.commit()
-            return resp
-
-        # Sync request.
-        try:
-            timeout = config.get('SQLLAB_TIMEOUT')
-            timeout_msg = (
-                'The query exceeded the {timeout} seconds '
-                'timeout.').format(**locals())
-            with utils.timeout(seconds=timeout,
-                               error_message=timeout_msg):
-                # pylint: disable=no-value-for-parameter
-                data = sql_lab.get_sql_results(
-                    query_id=query_id, return_results=True,
-                    template_params=template_params)
-            payload = json.dumps(
-                data, default=utils.pessimistic_json_iso_dttm_ser)
-        except Exception as e:
-            logging.exception(e)
-            return json_error_response('{}'.format(e))
-        if data.get('status') == QueryStatus.FAILED:
-            return json_error_response(payload=data)
-        return json_success(payload)
-
     @has_access
     @expose('/csv/<client_id>')
     @log_this
@@ -2544,56 +2360,15 @@ def queries(self, last_updated_ms):
         return json_success(
             json.dumps(dict_queries, default=utils.json_int_dttm_ser))
 
-    @has_access
+    @expose('/sqllab')
+    def sqllab(self):
+        """For backward compatibility"""
+        return self.redirect_with_context('/sqllab/')
+
     @expose('/search_queries')
-    @log_this
     def search_queries(self):
-        """Search for queries."""
-        query = db.session.query(Query)
-        search_user_id = request.args.get('user_id')
-        database_id = request.args.get('database_id')
-        search_text = request.args.get('search_text')
-        status = request.args.get('status')
-        # From and To time stamp should be Epoch timestamp in seconds
-        from_time = request.args.get('from')
-        to_time = request.args.get('to')
-
-        if search_user_id:
-            # Filter on db Id
-            query = query.filter(Query.user_id == search_user_id)
-
-        if database_id:
-            # Filter on db Id
-            query = query.filter(Query.database_id == database_id)
-
-        if status:
-            # Filter on status
-            query = query.filter(Query.status == status)
-
-        if search_text:
-            # Filter on search text
-            query = query \
-                .filter(Query.sql.like('%{}%'.format(search_text)))
-
-        if from_time:
-            query = query.filter(Query.start_time > int(from_time))
-
-        if to_time:
-            query = query.filter(Query.start_time < int(to_time))
-
-        query_limit = config.get('QUERY_SEARCH_LIMIT', 1000)
-        sql_queries = (
-            query.order_by(Query.start_time.asc())
-            .limit(query_limit)
-            .all()
-        )
-
-        dict_queries = [q.to_dict() for q in sql_queries]
-
-        return Response(
-            json.dumps(dict_queries, default=utils.json_int_dttm_ser),
-            status=200,
-            mimetype='application/json')
+        """For backward compatibility"""
+        return self.redirect_with_context('/sqllab/search_queries/')
 
     @app.errorhandler(500)
     def show_traceback(self):
@@ -2639,20 +2414,6 @@ def profile(self, username):
             bootstrap_data=json.dumps(payload, default=utils.json_iso_dttm_ser),
         )
 
-    @has_access
-    @expose('/sqllab')
-    def sqllab(self):
-        """SQL Editor"""
-        d = {
-            'defaultDbId': config.get('SQLLAB_DEFAULT_DBID'),
-            'common': self.common_bootsrap_payload(),
-        }
-        return self.render_template(
-            'superset/basic.html',
-            entry='sqllab',
-            bootstrap_data=json.dumps(d, default=utils.json_iso_dttm_ser),
-        )
-
     @api
     @has_access_api
     @expose('/slice_query/<slice_id>/')
@@ -2697,26 +2458,6 @@ class CssTemplateAsyncModelView(CssTemplateModelView):
 
 appbuilder.add_view_no_menu(CssTemplateAsyncModelView)
 
-appbuilder.add_link(
-    'SQL Editor',
-    label=_('SQL Editor'),
-    href='/superset/sqllab',
-    category_icon='fa-flask',
-    icon='fa-flask',
-    category='SQL Lab',
-    category_label=__('SQL Lab'),
-)
-
-appbuilder.add_link(
-    'Query Search',
-    label=_('Query Search'),
-    href='/superset/sqllab#search',
-    icon='fa-search',
-    category_icon='fa-flask',
-    category='SQL Lab',
-    category_label=__('SQL Lab'),
-)
-
 appbuilder.add_link(
     'Upload a CSV',
     label=__('Upload a CSV'),
diff --git a/superset/views/sql_lab.py b/superset/views/sql_lab.py
index aac1d6d2c1..a5df983346 100644
--- a/superset/views/sql_lab.py
+++ b/superset/views/sql_lab.py
@@ -4,15 +4,29 @@
 from __future__ import print_function
 from __future__ import unicode_literals
 
-from flask import g, redirect
+import json
+import logging
+
+from flask import g, redirect, request
 from flask_appbuilder import expose
 from flask_appbuilder.models.sqla.interface import SQLAInterface
+from flask_appbuilder.security.decorators import has_access_api
 from flask_babel import gettext as __
 from flask_babel import lazy_gettext as _
 
-from superset import appbuilder
+from superset import app, appbuilder, db, security_manager, sql_lab, utils
+from superset.connectors.connector_registry import ConnectorRegistry
+import superset.models.core as models
 from superset.models.sql_lab import Query, SavedQuery
-from .base import BaseSupersetView, DeleteMixin, SupersetModelView
+from superset.sql_parse import SupersetQuery
+from superset.utils import QueryStatus
+from .base import (
+    BaseSupersetView, DeleteMixin, get_datasource_access_error_msg,
+    json_error_response, json_success, SupersetModelView,
+)
+
+config = app.config
+log_this = models.Log.log_this
 
 
 class QueryView(SupersetModelView):
@@ -81,20 +95,270 @@ class SavedQueryViewApi(SavedQueryView):
 appbuilder.add_view_no_menu(SavedQueryViewApi)
 appbuilder.add_view_no_menu(SavedQueryView)
 
-appbuilder.add_link(
-    __('Saved Queries'),
-    href='/sqllab/my_queries/',
-    icon='fa-save',
-    category='SQL Lab')
-
 
 class SqlLab(BaseSupersetView):
-    """The base views for Superset!"""
+    """SQL Lab views"""
+
+    @utils.has_access
+    @expose('/')
+    def sqllab(self):
+        """SQL Editor"""
+        d = {
+            'defaultDbId': config.get('SQLLAB_DEFAULT_DBID'),
+            'common': self.common_bootsrap_payload(),
+        }
+        return self.render_template(
+            'superset/basic.html',
+            entry='sqllab',
+            bootstrap_data=json.dumps(d, default=utils.json_iso_dttm_ser),
+        )
+
+    @has_access_api
+    @expose('/sql_json/', methods=['POST', 'GET'])
+    @log_this
+    def sql_json(self):
+        """Runs arbitrary sql and returns and json"""
+        # TODO move to ./sqllab.py
+        async = request.form.get('runAsync') == 'true'
+        sql = request.form.get('sql')
+        database_id = request.form.get('database_id')
+        schema = request.form.get('schema') or None
+        template_params = json.loads(
+            request.form.get('templateParams') or '{}')
+
+        session = db.session()
+        mydb = session.query(models.Database).filter_by(id=database_id).first()
+
+        if not mydb:
+            json_error_response(
+                'Database with id {} is missing.'.format(database_id))
+
+        rejected_tables = security_manager.rejected_datasources(sql, mydb, schema)
+        if rejected_tables:
+            return json_error_response(get_datasource_access_error_msg(
+                '{}'.format(rejected_tables)))
+        session.commit()
+
+        select_as_cta = request.form.get('select_as_cta') == 'true'
+        tmp_table_name = request.form.get('tmp_table_name')
+        if select_as_cta and mydb.force_ctas_schema:
+            tmp_table_name = '{}.{}'.format(
+                mydb.force_ctas_schema,
+                tmp_table_name,
+            )
+
+        query = Query(
+            database_id=int(database_id),
+            limit=int(app.config.get('SQL_MAX_ROW', None)),
+            sql=sql,
+            schema=schema,
+            select_as_cta=request.form.get('select_as_cta') == 'true',
+            start_time=utils.now_as_float(),
+            tab_name=request.form.get('tab'),
+            status=QueryStatus.PENDING if async else QueryStatus.RUNNING,
+            sql_editor_id=request.form.get('sql_editor_id'),
+            tmp_table_name=tmp_table_name,
+            user_id=int(g.user.get_id()),
+            client_id=request.form.get('client_id'),
+        )
+        session.add(query)
+        session.flush()
+        query_id = query.id
+        session.commit()  # shouldn't be necessary
+        if not query_id:
+            raise Exception(_('Query record was not created as expected.'))
+        logging.info('Triggering query_id: {}'.format(query_id))
+
+        # Async request.
+        if async:
+            logging.info('Running query on a Celery worker')
+            # Ignore the celery future object and the request may time out.
+            try:
+                sql_lab.get_sql_results.delay(
+                    query_id=query_id, return_results=False,
+                    store_results=not query.select_as_cta,
+                    user_name=g.user.username,
+                    template_params=template_params)
+            except Exception as e:
+                logging.exception(e)
+                msg = (
+                    'Failed to start remote query on a worker. '
+                    'Tell your administrator to verify the availability of '
+                    'the message queue.'
+                )
+                query.status = QueryStatus.FAILED
+                query.error_message = msg
+                session.commit()
+                return json_error_response('{}'.format(msg))
+
+            resp = json_success(json.dumps(
+                {'query': query.to_dict()}, default=utils.json_int_dttm_ser,
+                allow_nan=False), status=202)
+            session.commit()
+            return resp
+
+        # Sync request.
+        try:
+            timeout = config.get('SQLLAB_TIMEOUT')
+            timeout_msg = (
+                'The query exceeded the {timeout} seconds '
+                'timeout.').format(**locals())
+            with utils.timeout(seconds=timeout,
+                               error_message=timeout_msg):
+                # pylint: disable=no-value-for-parameter
+                data = sql_lab.get_sql_results(
+                    query_id=query_id, return_results=True,
+                    template_params=template_params)
+            payload = json.dumps(
+                data, default=utils.pessimistic_json_iso_dttm_ser)
+        except Exception as e:
+            logging.exception(e)
+            return json_error_response('{}'.format(e))
+        if data.get('status') == QueryStatus.FAILED:
+            return json_error_response(payload=data)
+        return json_success(payload)
+
     @expose('/my_queries/')
     def my_queries(self):
         """Assigns a list of found users to the given role."""
         return redirect(
             '/savedqueryview/list/?_flt_0_user={}'.format(g.user.id))
 
+    @utils.has_access
+    @expose('/sqllab_viz/', methods=['POST'])
+    @log_this
+    def sqllab_viz(self):
+        SqlaTable = ConnectorRegistry.sources['table']
+        data = json.loads(request.form.get('data'))
+        table_name = data.get('datasourceName')
+        table = (
+            db.session.query(SqlaTable)
+            .filter_by(table_name=table_name)
+            .first()
+        )
+        if not table:
+            table = SqlaTable(table_name=table_name)
+        table.database_id = data.get('dbId')
+        table.schema = data.get('schema')
+        q = SupersetQuery(data.get('sql'))
+        table.sql = q.stripped()
+        db.session.add(table)
+        cols = []
+        dims = []
+        metrics = []
+        for column_name, config in data.get('columns').items():
+            is_dim = config.get('is_dim', False)
+            SqlaTable = ConnectorRegistry.sources['table']
+            TableColumn = SqlaTable.column_class
+            SqlMetric = SqlaTable.metric_class
+            col = TableColumn(
+                column_name=column_name,
+                filterable=is_dim,
+                groupby=is_dim,
+                is_dttm=config.get('is_date', False),
+                type=config.get('type', False),
+            )
+            cols.append(col)
+            if is_dim:
+                dims.append(col)
+            agg = config.get('agg')
+            if agg:
+                if agg == 'count_distinct':
+                    metrics.append(SqlMetric(
+                        metric_name='{agg}__{column_name}'.format(**locals()),
+                        expression='COUNT(DISTINCT {column_name})'
+                        .format(**locals()),
+                    ))
+                else:
+                    metrics.append(SqlMetric(
+                        metric_name='{agg}__{column_name}'.format(**locals()),
+                        expression='{agg}({column_name})'.format(**locals()),
+                    ))
+        if not metrics:
+            metrics.append(SqlMetric(
+                metric_name='count'.format(**locals()),
+                expression='count(*)'.format(**locals()),
+            ))
+        table.columns = cols
+        table.metrics = metrics
+        db.session.commit()
+        return self.json_response({
+            'table_id': table.id,
+        })
+
+    @utils.has_access
+    @expose('/search_queries/')
+    @log_this
+    def search_queries(self):
+        """Search for queries."""
+        query = db.session.query(Query)
+        search_user_id = request.args.get('user_id')
+        database_id = request.args.get('database_id')
+        search_text = request.args.get('search_text')
+        status = request.args.get('status')
+        # From and To time stamp should be Epoch timestamp in seconds
+        from_time = request.args.get('from')
+        to_time = request.args.get('to')
+
+        if search_user_id:
+            # Filter on db Id
+            query = query.filter(Query.user_id == search_user_id)
+
+        if database_id:
+            # Filter on db Id
+            query = query.filter(Query.database_id == database_id)
+
+        if status:
+            # Filter on status
+            query = query.filter(Query.status == status)
+
+        if search_text:
+            # Filter on search text
+            query = query \
+                .filter(Query.sql.like('%{}%'.format(search_text)))
+
+        if from_time:
+            query = query.filter(Query.start_time > int(from_time))
+
+        if to_time:
+            query = query.filter(Query.start_time < int(to_time))
+
+        query_limit = config.get('QUERY_SEARCH_LIMIT', 1000)
+        sql_queries = (
+            query.order_by(Query.start_time.asc())
+            .limit(query_limit)
+            .all()
+        )
+
+        dict_queries = [q.to_dict() for q in sql_queries]
+        return self.json_response(dict_queries)
+
 
 appbuilder.add_view_no_menu(SqlLab)
+
+appbuilder.add_link(
+    'SQL Editor',
+    label=_('SQL Editor'),
+    href='/sqllab/',
+    category_icon='fa-flask',
+    icon='fa-flask',
+    category='SQL Lab',
+    category_label=__('SQL Lab'),
+)
+
+appbuilder.add_link(
+    'Query Search',
+    label=_('Query Search'),
+    href='/sqllab/#search',
+    icon='fa-search',
+    category_icon='fa-flask',
+    category='SQL Lab',
+    category_label=__('SQL Lab'),
+)
+
+appbuilder.add_link(
+    __('Saved Queries'),
+    href='/sqllab/my_queries/',
+    icon='fa-save',
+    category='SQL Lab',
+)
diff --git a/tests/sqllab_tests.py b/tests/sqllab_tests.py
index afab1403f0..411e3a49c8 100644
--- a/tests/sqllab_tests.py
+++ b/tests/sqllab_tests.py
@@ -16,6 +16,8 @@
 from superset.sql_lab import convert_results_to_df
 from .base_tests import SupersetTestCase
 
+URL_SEARCH_QUERIES = '/sqllab/search_queries/'
+
 
 class SqlLabTests(SupersetTestCase):
     """Testings for Sql Lab"""
@@ -85,6 +87,37 @@ def test_sql_json_has_access(self):
         db.session.commit()
         self.assertLess(0, len(data['data']))
 
+    def test_sqllab_viz(self):
+        payload = {
+            'chartType': 'dist_bar',
+            'datasourceName': 'test_viz_flow_table',
+            'schema': 'superset',
+            'columns': {
+                'viz_type': {
+                    'is_date': False,
+                    'type': 'STRING',
+                    'name': 'viz_type',
+                    'is_dim': True,
+                },
+                'ccount': {
+                    'is_date': False,
+                    'type': 'OBJECT',
+                    'name': 'ccount',
+                    'is_dim': True,
+                    'agg': 'sum',
+                },
+            },
+            'sql': '''\
+                SELECT viz_type, count(1) as ccount
+                FROM slices
+                WHERE viz_type LIKE '%%a%%'
+                GROUP BY viz_type''',
+            'dbId': 1,
+        }
+        data = {"data": json.dumps(payload)}
+        resp = self.get_json_resp('/sqllab/sqllab_viz/', data=data)
+        self.assertIn('table_id', resp)
+
     def test_queries_endpoint(self):
         self.run_some_queries()
 
@@ -125,12 +158,12 @@ def test_search_query_on_db_id(self):
         self.run_some_queries()
         self.login('admin')
         # Test search queries on database Id
-        data = self.get_json_resp('/superset/search_queries?database_id=1')
+        data = self.get_json_resp(URL_SEARCH_QUERIES + '?database_id=1')
         self.assertEquals(3, len(data))
         db_ids = [k['dbId'] for k in data]
         self.assertEquals([1, 1, 1], db_ids)
 
-        resp = self.get_resp('/superset/search_queries?database_id=-1')
+        resp = self.get_resp(URL_SEARCH_QUERIES + '?database_id=-1')
         data = json.loads(resp)
         self.assertEquals(0, len(data))
 
@@ -141,14 +174,14 @@ def test_search_query_on_user(self):
         # Test search queries on user Id
         user_id = security_manager.find_user('admin').id
         data = self.get_json_resp(
-            '/superset/search_queries?user_id={}'.format(user_id))
+            URL_SEARCH_QUERIES + '?user_id={}'.format(user_id))
         self.assertEquals(2, len(data))
         user_ids = {k['userId'] for k in data}
         self.assertEquals(set([user_id]), user_ids)
 
         user_id = security_manager.find_user('gamma_sqllab').id
         resp = self.get_resp(
-            '/superset/search_queries?user_id={}'.format(user_id))
+            URL_SEARCH_QUERIES + '?user_id={}'.format(user_id))
         data = json.loads(resp)
         self.assertEquals(1, len(data))
         self.assertEquals(data[0]['userId'], user_id)
@@ -157,13 +190,13 @@ def test_search_query_on_status(self):
         self.run_some_queries()
         self.login('admin')
         # Test search queries on status
-        resp = self.get_resp('/superset/search_queries?status=success')
+        resp = self.get_resp(URL_SEARCH_QUERIES + '?status=success')
         data = json.loads(resp)
         self.assertEquals(2, len(data))
         states = [k['state'] for k in data]
         self.assertEquals(['success', 'success'], states)
 
-        resp = self.get_resp('/superset/search_queries?status=failed')
+        resp = self.get_resp(URL_SEARCH_QUERIES + '?status=failed')
         data = json.loads(resp)
         self.assertEquals(1, len(data))
         self.assertEquals(data[0]['state'], 'failed')
@@ -171,7 +204,7 @@ def test_search_query_on_status(self):
     def test_search_query_on_text(self):
         self.run_some_queries()
         self.login('admin')
-        url = '/superset/search_queries?search_text=permission'
+        url = URL_SEARCH_QUERIES + '?search_text=permission'
         data = self.get_json_resp(url)
         self.assertEquals(1, len(data))
         self.assertIn('permission', data[0]['sql'])
@@ -191,7 +224,7 @@ def test_search_query_on_time(self):
         from_time = 'from={}'.format(int(first_query_time))
         to_time = 'to={}'.format(int(second_query_time))
         params = [from_time, to_time]
-        resp = self.get_resp('/superset/search_queries?' + '&'.join(params))
+        resp = self.get_resp(URL_SEARCH_QUERIES + '?' + '&'.join(params))
         data = json.loads(resp)
         self.assertEquals(2, len(data))
 


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: notifications-unsubscribe@superset.apache.org
For additional commands, e-mail: notifications-help@superset.apache.org