You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@superset.apache.org by vi...@apache.org on 2021/01/29 14:25:46 UTC
[superset] 08/18: feat(explore): allow opening charts with missing
dataset (#12705)
This is an automated email from the ASF dual-hosted git repository.
villebro pushed a commit to branch 1.0
in repository https://gitbox.apache.org/repos/asf/superset.git
commit c1c798aac69f765664619fdce731db8f469dcc5f
Author: Jesse Yang <je...@airbnb.com>
AuthorDate: Mon Jan 25 15:09:03 2021 -0800
feat(explore): allow opening charts with missing dataset (#12705)
---
.../explore/components/DatasourceControl_spec.jsx | 2 +-
superset-frontend/src/common/components/index.tsx | 1 +
.../src/explore/components/Control.tsx | 4 +-
.../src/explore/components/DatasourcePanel.tsx | 62 ++++----------
.../components/controls/DatasourceControl.jsx | 49 ++++++++++-
superset/commands/exceptions.py | 2 +-
superset/commands/utils.py | 4 +-
superset/connectors/connector_registry.py | 17 +++-
superset/connectors/druid/views.py | 4 +-
superset/datasets/api.py | 1 +
superset/datasets/commands/exceptions.py | 10 ++-
superset/datasets/commands/importers/v0.py | 15 ++--
superset/models/slice.py | 2 +-
superset/models/tags.py | 7 +-
superset/translations/de/LC_MESSAGES/messages.json | 4 +-
superset/translations/de/LC_MESSAGES/messages.po | 4 +-
superset/translations/en/LC_MESSAGES/messages.json | 4 +-
superset/translations/en/LC_MESSAGES/messages.po | 4 +-
superset/translations/es/LC_MESSAGES/messages.json | 4 +-
superset/translations/es/LC_MESSAGES/messages.po | 4 +-
superset/translations/fr/LC_MESSAGES/messages.json | 6 +-
superset/translations/fr/LC_MESSAGES/messages.po | 4 +-
superset/translations/it/LC_MESSAGES/messages.json | 4 +-
superset/translations/it/LC_MESSAGES/messages.po | 4 +-
superset/translations/ja/LC_MESSAGES/messages.json | 4 +-
superset/translations/ja/LC_MESSAGES/messages.po | 4 +-
superset/translations/ko/LC_MESSAGES/messages.json | 4 +-
superset/translations/ko/LC_MESSAGES/messages.po | 4 +-
superset/translations/messages.pot | 4 +-
superset/translations/pt/LC_MESSAGES/message.json | 6 +-
superset/translations/pt/LC_MESSAGES/message.po | 4 +-
superset/translations/pt/LC_MESSAGES/messages.json | 4 +-
.../translations/pt_BR/LC_MESSAGES/messages.json | 6 +-
.../translations/pt_BR/LC_MESSAGES/messages.po | 4 +-
superset/translations/ru/LC_MESSAGES/messages.json | 4 +-
superset/translations/ru/LC_MESSAGES/messages.po | 4 +-
superset/translations/zh/LC_MESSAGES/messages.json | 4 +-
superset/translations/zh/LC_MESSAGES/messages.po | 6 +-
superset/utils/core.py | 3 +
superset/views/base.py | 7 +-
superset/views/core.py | 97 ++++++++++++----------
superset/views/datasource.py | 37 ++++-----
superset/views/utils.py | 8 +-
tests/base_tests.py | 15 ++++
tests/charts/api_tests.py | 21 +++--
tests/datasets/api_tests.py | 7 +-
tests/datasource_tests.py | 48 +++++------
47 files changed, 278 insertions(+), 249 deletions(-)
diff --git a/superset-frontend/spec/javascripts/explore/components/DatasourceControl_spec.jsx b/superset-frontend/spec/javascripts/explore/components/DatasourceControl_spec.jsx
index 2996dcd..5891327 100644
--- a/superset-frontend/spec/javascripts/explore/components/DatasourceControl_spec.jsx
+++ b/superset-frontend/spec/javascripts/explore/components/DatasourceControl_spec.jsx
@@ -99,7 +99,7 @@ describe('DatasourceControl', () => {
const wrapper = setup();
const alert = wrapper.find(Icon);
expect(alert.at(1).prop('name')).toBe('alert-solid');
- const tooltip = wrapper.find(Tooltip).at(1);
+ const tooltip = wrapper.find(Tooltip).at(0);
expect(tooltip.prop('title')).toBe(
defaultProps.datasource.health_check_message,
);
diff --git a/superset-frontend/src/common/components/index.tsx b/superset-frontend/src/common/components/index.tsx
index e193a2a..1058d35 100644
--- a/superset-frontend/src/common/components/index.tsx
+++ b/superset-frontend/src/common/components/index.tsx
@@ -28,6 +28,7 @@ import { DropDownProps } from 'antd/lib/dropdown';
*/
// eslint-disable-next-line no-restricted-imports
export {
+ Alert,
AutoComplete,
Avatar,
Button,
diff --git a/superset-frontend/src/explore/components/Control.tsx b/superset-frontend/src/explore/components/Control.tsx
index e728ad5..f1b6925 100644
--- a/superset-frontend/src/explore/components/Control.tsx
+++ b/superset-frontend/src/explore/components/Control.tsx
@@ -29,9 +29,9 @@ export type ControlProps = {
// signature to the original action factory.
actions: Partial<ExploreActions> & Pick<ExploreActions, 'setControlValue'>;
type: ControlType;
- label: string;
+ label?: ReactNode;
name: string;
- description?: string;
+ description?: ReactNode;
tooltipOnClick?: () => ReactNode;
places?: number;
rightNode?: ReactNode;
diff --git a/superset-frontend/src/explore/components/DatasourcePanel.tsx b/superset-frontend/src/explore/components/DatasourcePanel.tsx
index dd81ce0..f781074 100644
--- a/superset-frontend/src/explore/components/DatasourcePanel.tsx
+++ b/superset-frontend/src/explore/components/DatasourcePanel.tsx
@@ -17,57 +17,25 @@
* under the License.
*/
import React, { useEffect, useState } from 'react';
-import { styled, t, QueryFormData } from '@superset-ui/core';
+import { styled, t } from '@superset-ui/core';
import { Collapse } from 'src/common/components';
import {
ColumnOption,
MetricOption,
- ControlType,
+ ControlConfig,
+ DatasourceMeta,
} from '@superset-ui/chart-controls';
import { debounce } from 'lodash';
import { matchSorter, rankings } from 'match-sorter';
import { ExploreActions } from '../actions/exploreActions';
import Control from './Control';
-interface DatasourceControl {
- validationErrors: Array<any>;
- mapStateToProps: QueryFormData;
- type: ControlType;
- label: string;
- datasource?: DatasourceControl;
+interface DatasourceControl extends ControlConfig {
+ datasource?: DatasourceMeta;
}
-type Columns = {
- column_name: string;
- description: string | undefined;
- expression: string | undefined;
- filterable: boolean;
- groupby: string | undefined;
- id: number;
- is_dttm: boolean;
- python_date_format: string;
- type: string;
- verbose_name: string;
-};
-
-type Metrics = {
- certification_details: string | undefined;
- certified_by: string | undefined;
- d3format: string | undefined;
- description: string | undefined;
- expression: string;
- id: number;
- is_certified: boolean;
- metric_name: string;
- verbose_name: string;
- warning_text: string;
-};
-
interface Props {
- datasource: {
- columns: Array<Columns>;
- metrics: Array<Metrics>;
- };
+ datasource: DatasourceMeta;
controls: {
datasource: DatasourceControl;
};
@@ -228,15 +196,8 @@ export default function DataSourcePanel({
const metricSlice = lists.metrics.slice(0, 50);
const columnSlice = lists.columns.slice(0, 50);
- return (
- <DatasourceContainer>
- <Control
- {...datasourceControl}
- name="datasource"
- validationErrors={datasourceControl.validationErrors}
- actions={actions}
- formData={datasourceControl.mapStateToProps}
- />
+ const mainBody = (
+ <>
<input
type="text"
onChange={evt => {
@@ -279,6 +240,13 @@ export default function DataSourcePanel({
</Collapse.Panel>
</Collapse>
</div>
+ </>
+ );
+
+ return (
+ <DatasourceContainer>
+ <Control {...datasourceControl} name="datasource" actions={actions} />
+ {datasource.id != null && mainBody}
</DatasourceContainer>
);
}
diff --git a/superset-frontend/src/explore/components/controls/DatasourceControl.jsx b/superset-frontend/src/explore/components/controls/DatasourceControl.jsx
index d531d55..88e804d 100644
--- a/superset-frontend/src/explore/components/controls/DatasourceControl.jsx
+++ b/superset-frontend/src/explore/components/controls/DatasourceControl.jsx
@@ -26,6 +26,8 @@ import Icon from 'src/components/Icon';
import ChangeDatasourceModal from 'src/datasource/ChangeDatasourceModal';
import DatasourceModal from 'src/datasource/DatasourceModal';
import { postForm } from 'src/explore/exploreUtils';
+import Button from 'src/components/Button';
+import ErrorAlert from 'src/components/ErrorMessage/ErrorAlert';
const propTypes = {
actions: PropTypes.object.isRequired,
@@ -51,6 +53,9 @@ const Styles = styled.div`
border-bottom: 1px solid ${({ theme }) => theme.colors.grayscale.light2};
padding: ${({ theme }) => 2 * theme.gridUnit}px;
}
+ .error-alert {
+ margin: ${({ theme }) => 2 * theme.gridUnit}px;
+ }
.ant-dropdown-trigger {
margin-left: ${({ theme }) => 2 * theme.gridUnit}px;
box-shadow: none;
@@ -152,6 +157,7 @@ class DatasourceControl extends React.PureComponent {
render() {
const { showChangeDatasourceModal, showEditDatasourceModal } = this.state;
const { datasource, onChange } = this.props;
+ const isMissingDatasource = !datasource.id;
const datasourceMenu = (
<Menu onClick={this.handleMenuItemClick}>
{this.props.isEditable && (
@@ -164,16 +170,22 @@ class DatasourceControl extends React.PureComponent {
</Menu>
);
- // eslint-disable-next-line camelcase
const { health_check_message: healthCheckMessage } = datasource;
return (
<Styles className="DatasourceControl">
<div className="data-container">
<Icon name="dataset-physical" className="dataset-svg" />
- <Tooltip title={datasource.name}>
- <span className="title-select">{datasource.name}</span>
- </Tooltip>
+ {/* Add a tooltip only for long dataset names */}
+ {!isMissingDatasource && datasource.name.length > 25 ? (
+ <Tooltip title={datasource.name}>
+ <span className="title-select">{datasource.name}</span>
+ </Tooltip>
+ ) : (
+ <span title={datasource.name} className="title-select">
+ {datasource.name}
+ </span>
+ )}
{healthCheckMessage && (
<Tooltip title={healthCheckMessage}>
<Icon
@@ -196,6 +208,35 @@ class DatasourceControl extends React.PureComponent {
</Tooltip>
</Dropdown>
</div>
+ {/* missing dataset */}
+ {isMissingDatasource && (
+ <div className="error-alert">
+ <ErrorAlert
+ level="warning"
+ title={t('Missing dataset')}
+ source="explore"
+ subtitle={
+ <>
+ <p>
+ {t(
+ 'The dataset linked to this chart may have been deleted.',
+ )}
+ </p>
+ <p>
+ <Button
+ buttonStyle="primary"
+ onClick={() =>
+ this.handleMenuItemClick({ key: CHANGE_DATASET })
+ }
+ >
+ {t('Change dataset')}
+ </Button>
+ </p>
+ </>
+ }
+ />
+ </div>
+ )}
{showEditDatasourceModal && (
<DatasourceModal
datasource={datasource}
diff --git a/superset/commands/exceptions.py b/superset/commands/exceptions.py
index 9694755..fdc7bee 100644
--- a/superset/commands/exceptions.py
+++ b/superset/commands/exceptions.py
@@ -89,4 +89,4 @@ class DatasourceNotFoundValidationError(ValidationError):
status = 404
def __init__(self) -> None:
- super().__init__([_("Datasource does not exist")], field_name="datasource_id")
+ super().__init__([_("Dataset does not exist")], field_name="datasource_id")
diff --git a/superset/commands/utils.py b/superset/commands/utils.py
index c0bd8b7..874ea4b 100644
--- a/superset/commands/utils.py
+++ b/superset/commands/utils.py
@@ -17,7 +17,6 @@
from typing import List, Optional
from flask_appbuilder.security.sqla.models import User
-from sqlalchemy.orm.exc import NoResultFound
from superset.commands.exceptions import (
DatasourceNotFoundValidationError,
@@ -25,6 +24,7 @@ from superset.commands.exceptions import (
)
from superset.connectors.base.models import BaseDatasource
from superset.connectors.connector_registry import ConnectorRegistry
+from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.extensions import db, security_manager
@@ -53,5 +53,5 @@ def get_datasource_by_id(datasource_id: int, datasource_type: str) -> BaseDataso
return ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session
)
- except (NoResultFound, KeyError):
+ except DatasetNotFoundError:
raise DatasourceNotFoundValidationError()
diff --git a/superset/connectors/connector_registry.py b/superset/connectors/connector_registry.py
index fa9a54b..0931fd9 100644
--- a/superset/connectors/connector_registry.py
+++ b/superset/connectors/connector_registry.py
@@ -19,6 +19,8 @@ from typing import Dict, List, Optional, Set, Type, TYPE_CHECKING
from sqlalchemy import or_
from sqlalchemy.orm import Session, subqueryload
+from superset.datasets.commands.exceptions import DatasetNotFoundError
+
if TYPE_CHECKING:
from collections import OrderedDict
@@ -44,12 +46,23 @@ class ConnectorRegistry:
def get_datasource(
cls, datasource_type: str, datasource_id: int, session: Session
) -> "BaseDatasource":
- return (
+ """Safely get a datasource instance, raises `DatasetNotFoundError` if
+ `datasource_type` is not registered or `datasource_id` does not
+ exist."""
+ if datasource_type not in cls.sources:
+ raise DatasetNotFoundError()
+
+ datasource = (
session.query(cls.sources[datasource_type])
.filter_by(id=datasource_id)
- .one()
+ .one_or_none()
)
+ if not datasource:
+ raise DatasetNotFoundError()
+
+ return datasource
+
@classmethod
def get_all_datasources(cls, session: Session) -> List["BaseDatasource"]:
datasources: List["BaseDatasource"] = []
diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py
index 112bcd1..4b2b45b 100644
--- a/superset/connectors/druid/views.py
+++ b/superset/connectors/druid/views.py
@@ -39,7 +39,7 @@ from superset.views.base import (
BaseSupersetView,
DatasourceFilter,
DeleteMixin,
- get_datasource_exist_error_msg,
+ get_dataset_exist_error_msg,
ListWidgetWithCheckboxes,
SupersetModelView,
validate_json,
@@ -352,7 +352,7 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin
models.DruidDatasource.cluster_id == item.cluster_id,
)
if db.session.query(query.exists()).scalar():
- raise Exception(get_datasource_exist_error_msg(item.full_name))
+ raise Exception(get_dataset_exist_error_msg(item.full_name))
def post_add(self, item: "DruidDatasourceModelView") -> None:
item.refresh_metrics()
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index 68cfdbb..9660a1c 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -228,6 +228,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
# This validates custom Schema with custom validations
except ValidationError as error:
return self.response_400(message=error.messages)
+
try:
new_model = CreateDatasetCommand(g.user, item).run()
return self.response(201, id=new_model.id, result=item)
diff --git a/superset/datasets/commands/exceptions.py b/superset/datasets/commands/exceptions.py
index 03071ca..44064f0 100644
--- a/superset/datasets/commands/exceptions.py
+++ b/superset/datasets/commands/exceptions.py
@@ -26,7 +26,10 @@ from superset.commands.exceptions import (
ImportFailedError,
UpdateFailedError,
)
-from superset.views.base import get_datasource_exist_error_msg
+
+
+def get_dataset_exist_error_msg(full_name: str) -> str:
+ return _("Dataset %(name)s already exists", name=full_name)
class DatabaseNotFoundValidationError(ValidationError):
@@ -54,7 +57,7 @@ class DatasetExistsValidationError(ValidationError):
def __init__(self, table_name: str) -> None:
super().__init__(
- get_datasource_exist_error_msg(table_name), field_name="table_name"
+ [get_dataset_exist_error_msg(table_name)], field_name="table_name"
)
@@ -142,7 +145,8 @@ class OwnersNotFoundValidationError(ValidationError):
class DatasetNotFoundError(CommandException):
- message = "Dataset not found."
+ status = 404
+ message = _("Dataset does not exist")
class DatasetInvalidError(CommandInvalidError):
diff --git a/superset/datasets/commands/importers/v0.py b/superset/datasets/commands/importers/v0.py
index df02a39..a19e9ae 100644
--- a/superset/datasets/commands/importers/v0.py
+++ b/superset/datasets/commands/importers/v0.py
@@ -21,7 +21,6 @@ from typing import Any, Callable, Dict, List, Optional
import yaml
from flask_appbuilder import Model
from sqlalchemy.orm import Session
-from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.session import make_transient
from superset import db
@@ -56,14 +55,14 @@ def lookup_sqla_table(table: SqlaTable) -> Optional[SqlaTable]:
def lookup_sqla_database(table: SqlaTable) -> Optional[Database]:
- try:
- return (
- db.session.query(Database)
- .filter_by(database_name=table.params_dict["database_name"])
- .one()
- )
- except NoResultFound:
+ database = (
+ db.session.query(Database)
+ .filter_by(database_name=table.params_dict["database_name"])
+ .one_or_none()
+ )
+ if database is None:
raise DatabaseNotFoundError
+ return database
def lookup_druid_cluster(datasource: DruidDatasource) -> Optional[DruidCluster]:
diff --git a/superset/models/slice.py b/superset/models/slice.py
index 2fd55a7..7461fa8 100644
--- a/superset/models/slice.py
+++ b/superset/models/slice.py
@@ -206,7 +206,7 @@ class Slice(
"""
Returns a MD5 HEX digest that makes this dashboard unique
"""
- return utils.md5_hex(self.params)
+ return utils.md5_hex(self.params or "")
@property
def thumbnail_url(self) -> str:
diff --git a/superset/models/tags.py b/superset/models/tags.py
index 3f508ff..722c5b0 100644
--- a/superset/models/tags.py
+++ b/superset/models/tags.py
@@ -23,7 +23,6 @@ from flask_appbuilder import Model
from sqlalchemy import Column, Enum, ForeignKey, Integer, String
from sqlalchemy.engine.base import Connection
from sqlalchemy.orm import relationship, Session, sessionmaker
-from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.mapper import Mapper
from superset.models.helpers import AuditMixinNullable
@@ -89,13 +88,11 @@ class TaggedObject(Model, AuditMixinNullable):
def get_tag(name: str, session: Session, type_: TagTypes) -> Tag:
- try:
- tag = session.query(Tag).filter_by(name=name, type=type_).one()
- except NoResultFound:
+ tag = session.query(Tag).filter_by(name=name, type=type_).one_or_none()
+ if tag is None:
tag = Tag(name=name, type=type_)
session.add(tag)
session.commit()
-
return tag
diff --git a/superset/translations/de/LC_MESSAGES/messages.json b/superset/translations/de/LC_MESSAGES/messages.json
index 61175cb..72de85b 100644
--- a/superset/translations/de/LC_MESSAGES/messages.json
+++ b/superset/translations/de/LC_MESSAGES/messages.json
@@ -225,7 +225,7 @@
"Charts could not be deleted.": [""],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": ["Datenquellen"],
+ "Dataset does not exist": ["Datenquellen"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Druid Datenquelle einfügen"],
@@ -643,7 +643,7 @@
"Add Annotation Layer": ["Anmerkungstufe"],
"Edit Annotation Layer": ["Anmerkungstufe"],
"Name": ["Name"],
- "Datasource %(name)s already exists": [""],
+ "Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
diff --git a/superset/translations/de/LC_MESSAGES/messages.po b/superset/translations/de/LC_MESSAGES/messages.po
index 8aecafa..8cd006f 100644
--- a/superset/translations/de/LC_MESSAGES/messages.po
+++ b/superset/translations/de/LC_MESSAGES/messages.po
@@ -776,7 +776,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "Datenquellen"
#: superset/common/query_object.py:301
@@ -2303,7 +2303,7 @@ msgstr "Name"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
diff --git a/superset/translations/en/LC_MESSAGES/messages.json b/superset/translations/en/LC_MESSAGES/messages.json
index e9e28a8..28521e4 100644
--- a/superset/translations/en/LC_MESSAGES/messages.json
+++ b/superset/translations/en/LC_MESSAGES/messages.json
@@ -200,7 +200,7 @@
"Charts could not be deleted.": [""],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": [""],
+ "Dataset does not exist": [""],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": [""],
@@ -585,7 +585,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": [""],
"Name": [""],
- "Datasource %(name)s already exists": [""],
+ "Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
diff --git a/superset/translations/en/LC_MESSAGES/messages.po b/superset/translations/en/LC_MESSAGES/messages.po
index 7158ff0..86c784c 100644
--- a/superset/translations/en/LC_MESSAGES/messages.po
+++ b/superset/translations/en/LC_MESSAGES/messages.po
@@ -775,7 +775,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr ""
#: superset/common/query_object.py:301
@@ -2302,7 +2302,7 @@ msgstr ""
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
diff --git a/superset/translations/es/LC_MESSAGES/messages.json b/superset/translations/es/LC_MESSAGES/messages.json
index b8e088d..7ad0696 100644
--- a/superset/translations/es/LC_MESSAGES/messages.json
+++ b/superset/translations/es/LC_MESSAGES/messages.json
@@ -272,7 +272,7 @@
"Charts could not be deleted.": ["Los Gráficos no han podido eliminarse"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": ["Los propietarios son invalidos"],
- "Datasource does not exist": ["La fuente no existe"],
+ "Dataset does not exist": ["La fuente no existe"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Añadiendo [{}] como nueva fuente"],
@@ -696,7 +696,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": [""],
"Name": ["Nombre"],
- "Datasource %(name)s already exists": [
+ "Dataset %(name)s already exists": [
"La fuente de datos %(name)s ya existe"
],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
diff --git a/superset/translations/es/LC_MESSAGES/messages.po b/superset/translations/es/LC_MESSAGES/messages.po
index 19afda9..c916cc5 100644
--- a/superset/translations/es/LC_MESSAGES/messages.po
+++ b/superset/translations/es/LC_MESSAGES/messages.po
@@ -784,7 +784,7 @@ msgid "Owners are invalid"
msgstr "Los propietarios son invalidos"
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "La fuente no existe"
#: superset/common/query_object.py:301
@@ -2336,7 +2336,7 @@ msgstr "Nombre"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr "La fuente de datos %(name)s ya existe"
#: superset/views/base.py:227
diff --git a/superset/translations/fr/LC_MESSAGES/messages.json b/superset/translations/fr/LC_MESSAGES/messages.json
index 5629a9d..151f527 100644
--- a/superset/translations/fr/LC_MESSAGES/messages.json
+++ b/superset/translations/fr/LC_MESSAGES/messages.json
@@ -277,9 +277,7 @@
"Charts could not be deleted.": ["La requête ne peut pas être chargée"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": [
- "La source de données %(name)s existe déjà"
- ],
+ "Dataset does not exist": ["La source de données %(name)s existe déjà"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Ajouter une source de données Druid"],
@@ -728,7 +726,7 @@
"Add Annotation Layer": ["Ajouter une couche d'annotation"],
"Edit Annotation Layer": ["Ajouter une couche d'annotation"],
"Name": ["Nom"],
- "Datasource %(name)s already exists": [
+ "Dataset %(name)s already exists": [
"La source de données %(name)s existe déjà"
],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
diff --git a/superset/translations/fr/LC_MESSAGES/messages.po b/superset/translations/fr/LC_MESSAGES/messages.po
index 40b2827..660cceb 100644
--- a/superset/translations/fr/LC_MESSAGES/messages.po
+++ b/superset/translations/fr/LC_MESSAGES/messages.po
@@ -781,7 +781,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "La source de données %(name)s existe déjà"
#: superset/common/query_object.py:301
@@ -2350,7 +2350,7 @@ msgstr "Nom"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr "La source de données %(name)s existe déjà"
#: superset/views/base.py:227
diff --git a/superset/translations/it/LC_MESSAGES/messages.json b/superset/translations/it/LC_MESSAGES/messages.json
index d479c62..da4561b 100644
--- a/superset/translations/it/LC_MESSAGES/messages.json
+++ b/superset/translations/it/LC_MESSAGES/messages.json
@@ -237,7 +237,7 @@
"Charts could not be deleted.": ["La query non può essere caricata"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": ["Sorgente dati e tipo di grafico"],
+ "Dataset does not exist": ["Sorgente dati e tipo di grafico"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": [""],
@@ -643,7 +643,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": [""],
"Name": ["Nome"],
- "Datasource %(name)s already exists": [""],
+ "Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
diff --git a/superset/translations/it/LC_MESSAGES/messages.po b/superset/translations/it/LC_MESSAGES/messages.po
index 3b80e84..1157574 100644
--- a/superset/translations/it/LC_MESSAGES/messages.po
+++ b/superset/translations/it/LC_MESSAGES/messages.po
@@ -773,7 +773,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "Sorgente dati e tipo di grafico"
#: superset/common/query_object.py:301
@@ -2331,7 +2331,7 @@ msgstr "Nome"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
diff --git a/superset/translations/ja/LC_MESSAGES/messages.json b/superset/translations/ja/LC_MESSAGES/messages.json
index 2cd8c1a..7f7284f 100644
--- a/superset/translations/ja/LC_MESSAGES/messages.json
+++ b/superset/translations/ja/LC_MESSAGES/messages.json
@@ -213,7 +213,7 @@
"Charts could not be deleted.": [""],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": ["データソース"],
+ "Dataset does not exist": ["データソース"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": [""],
@@ -601,7 +601,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": [""],
"Name": ["名前"],
- "Datasource %(name)s already exists": [""],
+ "Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
diff --git a/superset/translations/ja/LC_MESSAGES/messages.po b/superset/translations/ja/LC_MESSAGES/messages.po
index 697631c..089826c 100644
--- a/superset/translations/ja/LC_MESSAGES/messages.po
+++ b/superset/translations/ja/LC_MESSAGES/messages.po
@@ -772,7 +772,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "データソース"
#: superset/common/query_object.py:301
@@ -2294,7 +2294,7 @@ msgstr "名前"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
diff --git a/superset/translations/ko/LC_MESSAGES/messages.json b/superset/translations/ko/LC_MESSAGES/messages.json
index fea747f..f756a74 100644
--- a/superset/translations/ko/LC_MESSAGES/messages.json
+++ b/superset/translations/ko/LC_MESSAGES/messages.json
@@ -197,7 +197,7 @@
"Charts could not be deleted.": [""],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": ["데이터소스"],
+ "Dataset does not exist": ["데이터소스"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["새 데이터소스 스캔"],
@@ -579,7 +579,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": ["주석 레이어"],
"Name": ["이름"],
- "Datasource %(name)s already exists": [""],
+ "Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
diff --git a/superset/translations/ko/LC_MESSAGES/messages.po b/superset/translations/ko/LC_MESSAGES/messages.po
index 74e9512..7168410 100644
--- a/superset/translations/ko/LC_MESSAGES/messages.po
+++ b/superset/translations/ko/LC_MESSAGES/messages.po
@@ -772,7 +772,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "데이터소스"
#: superset/common/query_object.py:301
@@ -2294,7 +2294,7 @@ msgstr "이름"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
diff --git a/superset/translations/messages.pot b/superset/translations/messages.pot
index 283ac68..4fd8fd0 100644
--- a/superset/translations/messages.pot
+++ b/superset/translations/messages.pot
@@ -775,7 +775,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr ""
#: superset/common/query_object.py:301
@@ -2306,7 +2306,7 @@ msgstr ""
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
diff --git a/superset/translations/pt/LC_MESSAGES/message.json b/superset/translations/pt/LC_MESSAGES/message.json
index d71426a..dd45b3e 100644
--- a/superset/translations/pt/LC_MESSAGES/message.json
+++ b/superset/translations/pt/LC_MESSAGES/message.json
@@ -258,7 +258,7 @@
"Charts could not be deleted.": ["Não foi possível carregar a query"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": ["Origem de dados %(name)s já existe"],
+ "Dataset does not exist": ["Origem de dados %(name)s já existe"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Adicionar origem de dados Druid"],
@@ -693,9 +693,7 @@
"Add Annotation Layer": ["Camadas de anotação"],
"Edit Annotation Layer": ["Camadas de anotação"],
"Name": ["Nome"],
- "Datasource %(name)s already exists": [
- "Origem de dados %(name)s já existe"
- ],
+ "Dataset %(name)s already exists": ["Origem de dados %(name)s já existe"],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
"Tabela [{}] não encontrada, por favor verifique conexão à base de dados, esquema e nome da tabela"
],
diff --git a/superset/translations/pt/LC_MESSAGES/message.po b/superset/translations/pt/LC_MESSAGES/message.po
index a1187a1..6ceaeaf 100644
--- a/superset/translations/pt/LC_MESSAGES/message.po
+++ b/superset/translations/pt/LC_MESSAGES/message.po
@@ -783,7 +783,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "Origem de dados %(name)s já existe"
#: superset/common/query_object.py:297
@@ -2363,7 +2363,7 @@ msgstr "Nome"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr "Origem de dados %(name)s já existe"
#: superset/views/base.py:227
diff --git a/superset/translations/pt/LC_MESSAGES/messages.json b/superset/translations/pt/LC_MESSAGES/messages.json
index 9243828..9bb3d75 100644
--- a/superset/translations/pt/LC_MESSAGES/messages.json
+++ b/superset/translations/pt/LC_MESSAGES/messages.json
@@ -1150,9 +1150,7 @@
"Welcome!": ["Bem vindo!"],
"Test Connection": ["Conexão de teste"],
"Manage": ["Gerir"],
- "Datasource %(name)s already exists": [
- "Origem de dados %(name)s já existe"
- ],
+ "Dataset %(name)s already exists": ["Origem de dados %(name)s já existe"],
"json isn't valid": ["json não é válido"],
"Delete": ["Eliminar"],
"Delete all Really?": ["Tem a certeza que pretende eliminar tudo?"],
diff --git a/superset/translations/pt_BR/LC_MESSAGES/messages.json b/superset/translations/pt_BR/LC_MESSAGES/messages.json
index 2974661..669f13b 100644
--- a/superset/translations/pt_BR/LC_MESSAGES/messages.json
+++ b/superset/translations/pt_BR/LC_MESSAGES/messages.json
@@ -328,7 +328,7 @@
"A importação do gráfico falhou por um motivo desconhecido"
],
"Owners are invalid": ["Donos inválidos"],
- "Datasource does not exist": ["Fonte de dados não existe"],
+ "Dataset does not exist": ["Fonte de dados não existe"],
"`operation` property of post processing object undefined": [
"A propriedade `operation` do objeto de pós processamento está indefinida"
],
@@ -935,9 +935,7 @@
"Add Annotation Layer": ["Adicionar camada de anotação"],
"Edit Annotation Layer": ["Editar camada de anotação"],
"Name": ["Nome"],
- "Datasource %(name)s already exists": [
- "Fonte de dados %(name)s já existe"
- ],
+ "Dataset %(name)s already exists": ["Fonte de dados %(name)s já existe"],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
"Não foi possível localizar a tabela [%{table}s], por favor revise sua conexão com o banco de dados, esquema e nome da tabela. Erro: {}"
],
diff --git a/superset/translations/pt_BR/LC_MESSAGES/messages.po b/superset/translations/pt_BR/LC_MESSAGES/messages.po
index cb10cc8..2a89d39 100644
--- a/superset/translations/pt_BR/LC_MESSAGES/messages.po
+++ b/superset/translations/pt_BR/LC_MESSAGES/messages.po
@@ -801,7 +801,7 @@ msgid "Owners are invalid"
msgstr "Donos inválidos"
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "Fonte de dados não existe"
#: superset/common/query_object.py:301
@@ -2439,7 +2439,7 @@ msgstr "Nome"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr "Fonte de dados %(name)s já existe"
#: superset/views/base.py:227
diff --git a/superset/translations/ru/LC_MESSAGES/messages.json b/superset/translations/ru/LC_MESSAGES/messages.json
index 015b286..e346451 100644
--- a/superset/translations/ru/LC_MESSAGES/messages.json
+++ b/superset/translations/ru/LC_MESSAGES/messages.json
@@ -243,7 +243,7 @@
"Charts could not be deleted.": ["Запрос невозможно загрузить"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": ["Источник данных %(name)s уже существует"],
+ "Dataset does not exist": ["Источник данных не существует"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Добавить Источник Данных Druid"],
@@ -659,7 +659,7 @@
"Add Annotation Layer": ["Добавить слой аннотации"],
"Edit Annotation Layer": ["Добавить слой аннотации"],
"Name": ["Название"],
- "Datasource %(name)s already exists": [
+ "Dataset %(name)s already exists": [
"Источник данных %(name)s уже существует"
],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
diff --git a/superset/translations/ru/LC_MESSAGES/messages.po b/superset/translations/ru/LC_MESSAGES/messages.po
index 7b1d509..2731756 100644
--- a/superset/translations/ru/LC_MESSAGES/messages.po
+++ b/superset/translations/ru/LC_MESSAGES/messages.po
@@ -782,7 +782,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
+msgid "Dataset does not exist"
msgstr "Источник данных %(name)s уже существует"
#: superset/common/query_object.py:301
@@ -2335,7 +2335,7 @@ msgstr "Название"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr "Источник данных %(name)s уже существует"
#: superset/views/base.py:227
diff --git a/superset/translations/zh/LC_MESSAGES/messages.json b/superset/translations/zh/LC_MESSAGES/messages.json
index d9f7e4a..231643f 100644
--- a/superset/translations/zh/LC_MESSAGES/messages.json
+++ b/superset/translations/zh/LC_MESSAGES/messages.json
@@ -223,7 +223,7 @@
"Charts could not be deleted.": ["这个查询无法被加载"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
- "Datasource does not exist": ["数据源%(name)s 已存在"],
+ "Dataset does not exist": ["数据集不存在"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["添加 Druid 数据源"],
@@ -617,7 +617,7 @@
"Add Annotation Layer": ["添加注释层"],
"Edit Annotation Layer": ["添加注释层"],
"Name": ["名字"],
- "Datasource %(name)s already exists": ["数据源%(name)s 已存在"],
+ "Dataset %(name)s already exists": ["数据源%(name)s 已存在"],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
"找不到 [{}] 表,请仔细检查您的数据库连接、Schema 和 表名"
],
diff --git a/superset/translations/zh/LC_MESSAGES/messages.po b/superset/translations/zh/LC_MESSAGES/messages.po
index acc69d6..02f864f 100644
--- a/superset/translations/zh/LC_MESSAGES/messages.po
+++ b/superset/translations/zh/LC_MESSAGES/messages.po
@@ -773,8 +773,8 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
-msgid "Datasource does not exist"
-msgstr "数据源%(name)s 已存在"
+msgid "Dataset does not exist"
+msgstr "数据集不存在"
#: superset/common/query_object.py:301
msgid "`operation` property of post processing object undefined"
@@ -2315,7 +2315,7 @@ msgstr "名字"
#: superset/views/base.py:207
#, python-format
-msgid "Datasource %(name)s already exists"
+msgid "Dataset %(name)s already exists"
msgstr "数据源%(name)s 已存在"
#: superset/views/base.py:227
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 8197ae0..a0e6cc7 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -71,6 +71,7 @@ from flask import current_app, flash, g, Markup, render_template
from flask_appbuilder import SQLA
from flask_appbuilder.security.sqla.models import Role, User
from flask_babel import gettext as __
+from flask_babel.speaklater import LazyString
from sqlalchemy import event, exc, select, Text
from sqlalchemy.dialects.mysql import MEDIUMTEXT
from sqlalchemy.engine import Connection, Engine
@@ -504,6 +505,8 @@ def base_json_conv( # pylint: disable=inconsistent-return-statements,too-many-r
return obj.decode("utf-8")
except Exception: # pylint: disable=broad-except
return "[bytes]"
+ if isinstance(obj, LazyString):
+ return str(obj)
def json_iso_dttm_ser(obj: Any, pessimistic: bool = False) -> str:
diff --git a/superset/views/base.py b/superset/views/base.py
index f88a7fc..c26309c 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -47,6 +47,7 @@ from superset import (
security_manager,
)
from superset.connectors.sqla import models
+from superset.datasets.commands.exceptions import get_dataset_exist_error_msg
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
SupersetErrorException,
@@ -203,10 +204,6 @@ def handle_api_exception(
return functools.update_wrapper(wraps, f)
-def get_datasource_exist_error_msg(full_name: str) -> str:
- return __("Datasource %(name)s already exists", name=full_name)
-
-
def validate_sqlatable(table: models.SqlaTable) -> None:
"""Checks the table existence in the database."""
with db.session.no_autoflush:
@@ -216,7 +213,7 @@ def validate_sqlatable(table: models.SqlaTable) -> None:
models.SqlaTable.database_id == table.database.id,
)
if db.session.query(table_query.exists()).scalar():
- raise Exception(get_datasource_exist_error_msg(table.full_name))
+ raise Exception(get_dataset_exist_error_msg(table.full_name))
# Fail before adding if the table can't be found
try:
diff --git a/superset/views/core.py b/superset/views/core.py
index a6cdad9..47df3e8 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -59,6 +59,7 @@ from superset import (
viz,
)
from superset.charts.dao import ChartDAO
+from superset.connectors.base.models import BaseDatasource
from superset.connectors.connector_registry import ConnectorRegistry
from superset.connectors.sqla.models import (
AnnotationDatasource,
@@ -70,6 +71,7 @@ from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
from superset.dashboards.dao import DashboardDAO
from superset.databases.dao import DatabaseDAO
from superset.databases.filters import DatabaseFilter
+from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.exceptions import (
CacheLoadError,
CertificateException,
@@ -294,7 +296,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
dar.datasource_type, dar.datasource_id, session,
)
if not datasource or security_manager.can_access_datasource(datasource):
- # datasource does not exist anymore
+ # Dataset does not exist anymore
session.delete(dar)
session.commit()
@@ -698,50 +700,47 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
)
}
)
-
flash(Markup(config["SIP_15_TOAST_MESSAGE"].format(url=url)))
- error_redirect = "/chart/list/"
try:
datasource_id, datasource_type = get_datasource_info(
datasource_id, datasource_type, form_data
)
- except SupersetException as ex:
- flash(
- _(
- "Error occurred when opening the chart: %(error)s",
- error=utils.error_msg_from_exception(ex),
- ),
- "danger",
- )
- return redirect(error_redirect)
+ except SupersetException:
+ datasource_id = None
+ # fallback unknown datasource to table type
+ datasource_type = SqlaTable.type
- datasource = ConnectorRegistry.get_datasource(
- cast(str, datasource_type), datasource_id, db.session
- )
- if not datasource:
- flash(DATASOURCE_MISSING_ERR, "danger")
- return redirect(error_redirect)
+ datasource: Optional[BaseDatasource] = None
+ if datasource_id is not None:
+ try:
+ datasource = ConnectorRegistry.get_datasource(
+ cast(str, datasource_type), datasource_id, db.session
+ )
+ except DatasetNotFoundError:
+ pass
+ datasource_name = datasource.name if datasource else _("[Missing Dataset]")
- if config["ENABLE_ACCESS_REQUEST"] and (
- not security_manager.can_access_datasource(datasource)
- ):
- flash(
- __(security_manager.get_datasource_access_error_msg(datasource)),
- "danger",
- )
- return redirect(
- "superset/request_access/?"
- f"datasource_type={datasource_type}&"
- f"datasource_id={datasource_id}&"
- )
+ if datasource:
+ if config["ENABLE_ACCESS_REQUEST"] and (
+ not security_manager.can_access_datasource(datasource)
+ ):
+ flash(
+ __(security_manager.get_datasource_access_error_msg(datasource)),
+ "danger",
+ )
+ return redirect(
+ "superset/request_access/?"
+ f"datasource_type={datasource_type}&"
+ f"datasource_id={datasource_id}&"
+ )
- # if feature enabled, run some health check rules for sqla datasource
- if hasattr(datasource, "health_check"):
- datasource.health_check()
+ # if feature enabled, run some health check rules for sqla datasource
+ if hasattr(datasource, "health_check"):
+ datasource.health_check()
viz_type = form_data.get("viz_type")
- if not viz_type and datasource.default_endpoint:
+ if not viz_type and datasource and datasource.default_endpoint:
return redirect(datasource.default_endpoint)
# slc perms
@@ -774,25 +773,31 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
status=400,
)
- if action in ("saveas", "overwrite"):
+ if action in ("saveas", "overwrite") and datasource:
return self.save_or_overwrite_slice(
slc,
slice_add_perm,
slice_overwrite_perm,
slice_download_perm,
- datasource_id,
- cast(str, datasource_type),
+ datasource.id,
+ datasource.type,
datasource.name,
)
standalone = (
request.args.get(utils.ReservedUrlParameters.STANDALONE.value) == "true"
)
+ dummy_datasource_data: Dict[str, Any] = {
+ "type": datasource_type,
+ "name": datasource_name,
+ "columns": [],
+ "metrics": [],
+ }
bootstrap_data = {
"can_add": slice_add_perm,
"can_download": slice_download_perm,
"can_overwrite": slice_overwrite_perm,
- "datasource": datasource.data,
+ "datasource": datasource.data if datasource else dummy_datasource_data,
"form_data": form_data,
"datasource_id": datasource_id,
"datasource_type": datasource_type,
@@ -802,15 +807,18 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
"forced_height": request.args.get("height"),
"common": common_bootstrap_payload(),
}
- table_name = (
- datasource.table_name
- if datasource_type == "table"
- else datasource.datasource_name
- )
if slc:
title = slc.slice_name
- else:
+ elif datasource:
+ table_name = (
+ datasource.table_name
+ if datasource_type == "table"
+ else datasource.datasource_name
+ )
title = _("Explore - %(table)s", table=table_name)
+ else:
+ title = _("Explore")
+
return self.render_template(
"superset/basic.html",
bootstrap_data=json.dumps(
@@ -1629,6 +1637,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
table_name = request.args.get("table_name")
db_name = request.args.get("db_name")
extra_filters = request.args.get("extra_filters")
+ slices: List[Slice] = []
if not slice_id and not (table_name and db_name):
return json_error_response(
diff --git a/superset/views/datasource.py b/superset/views/datasource.py
index 5c9a41d..d4ae9ef 100644
--- a/superset/views/datasource.py
+++ b/superset/views/datasource.py
@@ -20,7 +20,7 @@ from collections import Counter
from flask import request
from flask_appbuilder import expose
from flask_appbuilder.security.decorators import has_access_api
-from sqlalchemy.orm.exc import NoResultFound
+from flask_babel import _
from superset import db
from superset.connectors.connector_registry import ConnectorRegistry
@@ -42,7 +42,7 @@ class Datasource(BaseSupersetView):
def save(self) -> FlaskResponse:
data = request.form.get("data")
if not isinstance(data, str):
- return json_error_response("Request missing data field.", status=500)
+ return json_error_response(_("Request missing data field."), status=500)
datasource_dict = json.loads(data)
datasource_id = datasource_dict.get("id")
@@ -58,9 +58,7 @@ class Datasource(BaseSupersetView):
try:
check_ownership(orm_datasource)
except SupersetSecurityException:
- return json_error_response(
- f"{DatasetForbiddenError.message}", DatasetForbiddenError.status
- )
+ raise DatasetForbiddenError()
datasource_dict["owners"] = (
db.session.query(orm_datasource.owner_class)
@@ -77,7 +75,11 @@ class Datasource(BaseSupersetView):
]
if duplicates:
return json_error_response(
- f"Duplicate column name(s): {','.join(duplicates)}", status=409
+ _(
+ "Duplicate column name(s): %(columns)s",
+ columns=",".join(duplicates),
+ ),
+ status=409,
)
orm_datasource.update_from_object(datasource_dict)
if hasattr(orm_datasource, "health_check"):
@@ -92,17 +94,10 @@ class Datasource(BaseSupersetView):
@api
@handle_api_exception
def get(self, datasource_type: str, datasource_id: int) -> FlaskResponse:
- try:
- orm_datasource = ConnectorRegistry.get_datasource(
- datasource_type, datasource_id, db.session
- )
- if not orm_datasource.data:
- return json_error_response(
- "Error fetching datasource data.", status=500
- )
- return self.json_response(orm_datasource.data)
- except NoResultFound:
- return json_error_response("This datasource does not exist", status=400)
+ datasource = ConnectorRegistry.get_datasource(
+ datasource_type, datasource_id, db.session
+ )
+ return self.json_response(datasource.data)
@expose("/external_metadata/<datasource_type>/<datasource_id>/")
@has_access_api
@@ -112,11 +107,11 @@ class Datasource(BaseSupersetView):
self, datasource_type: str, datasource_id: int
) -> FlaskResponse:
"""Gets column info from the source system"""
+ datasource = ConnectorRegistry.get_datasource(
+ datasource_type, datasource_id, db.session
+ )
try:
- datasource = ConnectorRegistry.get_datasource(
- datasource_type, datasource_id, db.session
- )
external_metadata = datasource.external_metadata()
- return self.json_response(external_metadata)
except SupersetException as ex:
return json_error_response(str(ex), status=400)
+ return self.json_response(external_metadata)
diff --git a/superset/views/utils.py b/superset/views/utils.py
index c01b750..5f9ec10 100644
--- a/superset/views/utils.py
+++ b/superset/views/utils.py
@@ -27,7 +27,7 @@ import simplejson as json
from flask import g, request
from flask_appbuilder.security.sqla import models as ab_models
from flask_appbuilder.security.sqla.models import User
-from flask_babel import gettext as __
+from flask_babel import _
from sqlalchemy.orm.exc import NoResultFound
import superset.models.core as models
@@ -228,7 +228,7 @@ def get_datasource_info(
if not datasource_id:
raise SupersetException(
- "The dataset associated with this chart no longer exists"
+ _("The dataset associated with this chart no longer exists")
)
datasource_id = int(datasource_id)
@@ -507,7 +507,7 @@ def check_datasource_perms(
SupersetError(
error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR,
level=ErrorLevel.ERROR,
- message=__("Could not determine datasource type"),
+ message=_("Could not determine datasource type"),
)
)
@@ -523,7 +523,7 @@ def check_datasource_perms(
SupersetError(
error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR,
level=ErrorLevel.ERROR,
- message=__("Could not find viz object"),
+ message=_("Could not find viz object"),
)
)
diff --git a/tests/base_tests.py b/tests/base_tests.py
index e494c89..81e218b 100644
--- a/tests/base_tests.py
+++ b/tests/base_tests.py
@@ -18,6 +18,7 @@
"""Unit tests for Superset"""
import imp
import json
+from contextlib import contextmanager
from typing import Any, Dict, Union, List, Optional
from unittest.mock import Mock, patch
@@ -26,6 +27,7 @@ import pytest
from flask import Response
from flask_appbuilder.security.sqla import models as ab_models
from flask_testing import TestCase
+from sqlalchemy.ext.declarative.api import DeclarativeMeta
from sqlalchemy.orm import Session
from tests.test_app import app
@@ -495,3 +497,16 @@ class SupersetTestCase(TestCase):
else:
mock_method.assert_called_once_with("error", func_name)
return rv
+
+
+@contextmanager
+def db_insert_temp_object(obj: DeclarativeMeta):
+ """Insert a temporary object in database; delete when done."""
+ session = db.session
+ try:
+ session.add(obj)
+ session.commit()
+ yield obj
+ finally:
+ session.delete(obj)
+ session.commit()
diff --git a/tests/charts/api_tests.py b/tests/charts/api_tests.py
index 8e22074..94e4d8f 100644
--- a/tests/charts/api_tests.py
+++ b/tests/charts/api_tests.py
@@ -527,8 +527,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin):
"datasource_id": 1,
"datasource_type": "unknown",
}
- uri = f"api/v1/chart/"
- rv = self.post_assert_metric(uri, chart_data, "post")
+ rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post")
self.assertEqual(rv.status_code, 400)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(
@@ -540,12 +539,11 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin):
"datasource_id": 0,
"datasource_type": "table",
}
- uri = f"api/v1/chart/"
- rv = self.post_assert_metric(uri, chart_data, "post")
+ rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post")
self.assertEqual(rv.status_code, 422)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(
- response, {"message": {"datasource_id": ["Datasource does not exist"]}}
+ response, {"message": {"datasource_id": ["Dataset does not exist"]}}
)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@@ -665,25 +663,26 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin):
Chart API: Test update validate datasource
"""
admin = self.get_user("admin")
- chart = self.insert_chart("title", [admin.id], 1)
+ chart = self.insert_chart("title", owners=[admin.id], datasource_id=1)
self.login(username="admin")
+
chart_data = {"datasource_id": 1, "datasource_type": "unknown"}
- uri = f"api/v1/chart/{chart.id}"
- rv = self.put_assert_metric(uri, chart_data, "put")
+ rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put")
self.assertEqual(rv.status_code, 400)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(
response,
{"message": {"datasource_type": ["Must be one of: druid, table, view."]}},
)
+
chart_data = {"datasource_id": 0, "datasource_type": "table"}
- uri = f"api/v1/chart/{chart.id}"
- rv = self.put_assert_metric(uri, chart_data, "put")
+ rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put")
self.assertEqual(rv.status_code, 422)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(
- response, {"message": {"datasource_id": ["Datasource does not exist"]}}
+ response, {"message": {"datasource_id": ["Dataset does not exist"]}}
)
+
db.session.delete(chart)
db.session.commit()
diff --git a/tests/datasets/api_tests.py b/tests/datasets/api_tests.py
index ba1e999..5bcaa90 100644
--- a/tests/datasets/api_tests.py
+++ b/tests/datasets/api_tests.py
@@ -475,12 +475,11 @@ class TestDatasetApi(SupersetTestCase):
"database": energy_usage_ds.database_id,
"table_name": energy_usage_ds.table_name,
}
- uri = "api/v1/dataset/"
- rv = self.post_assert_metric(uri, table_data, "post")
+ rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post")
assert rv.status_code == 422
data = json.loads(rv.data.decode("utf-8"))
assert data == {
- "message": {"table_name": ["Datasource energy_usage already exists"]}
+ "message": {"table_name": ["Dataset energy_usage already exists"]}
}
def test_create_dataset_same_name_different_schema(self):
@@ -838,7 +837,7 @@ class TestDatasetApi(SupersetTestCase):
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 422
expected_response = {
- "message": {"table_name": ["Datasource ab_user already exists"]}
+ "message": {"table_name": ["Dataset ab_user already exists"]}
}
assert data == expected_response
db.session.delete(dataset)
diff --git a/tests/datasource_tests.py b/tests/datasource_tests.py
index 290e135..ef49640 100644
--- a/tests/datasource_tests.py
+++ b/tests/datasource_tests.py
@@ -22,10 +22,11 @@ import pytest
from superset import app, ConnectorRegistry, db
from superset.connectors.sqla.models import SqlaTable
+from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.utils.core import get_example_database
from tests.fixtures.birth_names_dashboard import load_birth_names_dashboard_with_slices
-from .base_tests import SupersetTestCase
+from .base_tests import db_insert_temp_object, SupersetTestCase
from .fixtures.datasource import datasource_post
@@ -72,42 +73,28 @@ class TestDatasource(SupersetTestCase):
def test_external_metadata_for_malicious_virtual_table(self):
self.login(username="admin")
- session = db.session
table = SqlaTable(
table_name="malicious_sql_table",
database=get_example_database(),
sql="delete table birth_names",
)
- session.add(table)
- session.commit()
-
- table = self.get_table_by_name("malicious_sql_table")
- url = f"/datasource/external_metadata/table/{table.id}/"
- resp = self.get_json_resp(url)
- assert "error" in resp
-
- session.delete(table)
- session.commit()
+ with db_insert_temp_object(table):
+ url = f"/datasource/external_metadata/table/{table.id}/"
+ resp = self.get_json_resp(url)
+ self.assertEqual(resp["error"], "Only `SELECT` statements are allowed")
def test_external_metadata_for_mutistatement_virtual_table(self):
self.login(username="admin")
- session = db.session
table = SqlaTable(
table_name="multistatement_sql_table",
database=get_example_database(),
sql="select 123 as intcol, 'abc' as strcol;"
"select 123 as intcol, 'abc' as strcol",
)
- session.add(table)
- session.commit()
-
- table = self.get_table_by_name("multistatement_sql_table")
- url = f"/datasource/external_metadata/table/{table.id}/"
- resp = self.get_json_resp(url)
- assert "error" in resp
-
- session.delete(table)
- session.commit()
+ with db_insert_temp_object(table):
+ url = f"/datasource/external_metadata/table/{table.id}/"
+ resp = self.get_json_resp(url)
+ self.assertEqual(resp["error"], "Only single queries supported")
def compare_lists(self, l1, l2, key):
l2_lookup = {o.get(key): o for o in l2}
@@ -251,7 +238,16 @@ class TestDatasource(SupersetTestCase):
del app.config["DATASET_HEALTH_CHECK"]
def test_get_datasource_failed(self):
+ pytest.raises(
+ DatasetNotFoundError,
+ lambda: ConnectorRegistry.get_datasource("table", 9999999, db.session),
+ )
+
self.login(username="admin")
- url = f"/datasource/get/druid/500000/"
- resp = self.get_json_resp(url)
- self.assertEqual(resp.get("error"), "This datasource does not exist")
+ resp = self.get_json_resp("/datasource/get/druid/500000/", raise_on_error=False)
+ self.assertEqual(resp.get("error"), "Dataset does not exist")
+
+ resp = self.get_json_resp(
+ "/datasource/get/invalid-datasource-type/500000/", raise_on_error=False
+ )
+ self.assertEqual(resp.get("error"), "Dataset does not exist")