You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@superset.apache.org by vi...@apache.org on 2023/04/21 06:11:54 UTC
[superset] branch master updated: chore: remove RemovedInMarshmallow4 warnings (#23704)
This is an automated email from the ASF dual-hosted git repository.
villebro pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git
The following commit(s) were added to refs/heads/master by this push:
new b31efbae7c chore: remove RemovedInMarshmallow4 warnings (#23704)
b31efbae7c is described below
commit b31efbae7c58fdf665f4910e895b4f9c1d7ebe1b
Author: Sebastian Liebscher <11...@users.noreply.github.com>
AuthorDate: Fri Apr 21 08:11:46 2023 +0200
chore: remove RemovedInMarshmallow4 warnings (#23704)
---
superset/advanced_data_type/schemas.py | 6 +-
superset/annotation_layers/annotations/schemas.py | 28 +-
superset/annotation_layers/schemas.py | 16 +-
superset/cachekeys/schemas.py | 12 +-
superset/charts/schemas.py | 929 +++++++++++++---------
superset/dashboards/permalink/schemas.py | 18 +-
superset/dashboards/schemas.py | 100 ++-
superset/databases/schemas.py | 227 ++++--
superset/datasets/schemas.py | 36 +-
superset/db_engine_specs/base.py | 23 +-
superset/db_engine_specs/bigquery.py | 2 +-
superset/db_engine_specs/clickhouse.py | 21 +-
superset/db_engine_specs/databricks.py | 5 +-
superset/db_engine_specs/gsheets.py | 6 +-
superset/explore/form_data/schemas.py | 20 +-
superset/explore/permalink/schemas.py | 14 +-
superset/explore/schemas.py | 174 ++--
superset/reports/schemas.py | 107 +--
superset/sqllab/schemas.py | 14 +-
superset/temporary_cache/schemas.py | 4 +-
superset/views/base_api.py | 18 +-
superset/views/datasource/schemas.py | 2 +-
superset/views/log/schemas.py | 23 +-
23 files changed, 1112 insertions(+), 693 deletions(-)
diff --git a/superset/advanced_data_type/schemas.py b/superset/advanced_data_type/schemas.py
index 2175541b31..9c7dd221a1 100644
--- a/superset/advanced_data_type/schemas.py
+++ b/superset/advanced_data_type/schemas.py
@@ -39,8 +39,10 @@ class AdvancedDataTypeSchema(Schema):
"""
error_message = fields.String()
- values = fields.List(fields.String(description="parsed value (can be any value)"))
+ values = fields.List(
+ fields.String(metadata={"description": "parsed value (can be any value)"})
+ )
display_value = fields.String(
- description="The string representation of the parsed values"
+ metadata={"description": "The string representation of the parsed values"}
)
valid_filter_operators = fields.List(fields.String())
diff --git a/superset/annotation_layers/annotations/schemas.py b/superset/annotation_layers/annotations/schemas.py
index 5e0bac56f7..79ea66aad0 100644
--- a/superset/annotation_layers/annotations/schemas.py
+++ b/superset/annotation_layers/annotations/schemas.py
@@ -57,22 +57,24 @@ def validate_json(value: Union[bytes, bytearray, str]) -> None:
class AnnotationPostSchema(Schema):
short_descr = fields.String(
- description=annotation_short_descr,
+ metadata={"description": annotation_short_descr},
required=True,
allow_none=False,
validate=[Length(1, 500)],
)
- long_descr = fields.String(description=annotation_long_descr, allow_none=True)
+ long_descr = fields.String(
+ metadata={"description": annotation_long_descr}, allow_none=True
+ )
start_dttm = fields.DateTime(
- description=annotation_start_dttm,
+ metadata={"description": annotation_start_dttm},
required=True,
allow_none=False,
)
end_dttm = fields.DateTime(
- description=annotation_end_dttm, required=True, allow_none=False
+ metadata={"description": annotation_end_dttm}, required=True, allow_none=False
)
json_metadata = fields.String(
- description=annotation_json_metadata,
+ metadata={"description": annotation_json_metadata},
validate=validate_json,
allow_none=True,
)
@@ -80,15 +82,21 @@ class AnnotationPostSchema(Schema):
class AnnotationPutSchema(Schema):
short_descr = fields.String(
- description=annotation_short_descr, required=False, validate=[Length(1, 500)]
+ metadata={"description": annotation_short_descr},
+ required=False,
+ validate=[Length(1, 500)],
)
long_descr = fields.String(
- description=annotation_long_descr, required=False, allow_none=True
+ metadata={"description": annotation_long_descr}, required=False, allow_none=True
+ )
+ start_dttm = fields.DateTime(
+ metadata={"description": annotation_start_dttm}, required=False
+ )
+ end_dttm = fields.DateTime(
+ metadata={"description": annotation_end_dttm}, required=False
)
- start_dttm = fields.DateTime(description=annotation_start_dttm, required=False)
- end_dttm = fields.DateTime(description=annotation_end_dttm, required=False)
json_metadata = fields.String(
- description=annotation_json_metadata,
+ metadata={"description": annotation_json_metadata},
validate=validate_json,
required=False,
allow_none=True,
diff --git a/superset/annotation_layers/schemas.py b/superset/annotation_layers/schemas.py
index 4cd7493a32..17c3c274cd 100644
--- a/superset/annotation_layers/schemas.py
+++ b/superset/annotation_layers/schemas.py
@@ -40,13 +40,21 @@ annotation_layer_descr = "Give a description for this annotation layer"
class AnnotationLayerPostSchema(Schema):
name = fields.String(
- description=annotation_layer_name, required=True, validate=[Length(1, 250)]
+ metadata={"description": annotation_layer_name},
+ required=True,
+ validate=[Length(1, 250)],
+ )
+ descr = fields.String(
+ metadata={"description": annotation_layer_descr}, allow_none=True
)
- descr = fields.String(description=annotation_layer_descr, allow_none=True)
class AnnotationLayerPutSchema(Schema):
name = fields.String(
- description=annotation_layer_name, required=False, validate=[Length(1, 250)]
+ metadata={"description": annotation_layer_name},
+ required=False,
+ validate=[Length(1, 250)],
+ )
+ descr = fields.String(
+ metadata={"description": annotation_layer_descr}, required=False
)
- descr = fields.String(description=annotation_layer_descr, required=False)
diff --git a/superset/cachekeys/schemas.py b/superset/cachekeys/schemas.py
index 3d913e8b5f..e58a45ac56 100644
--- a/superset/cachekeys/schemas.py
+++ b/superset/cachekeys/schemas.py
@@ -27,16 +27,16 @@ from superset.utils.core import DatasourceType
class Datasource(Schema):
database_name = fields.String(
- description="Datasource name",
+ metadata={"description": "Datasource name"},
)
datasource_name = fields.String(
- description=datasource_name_description,
+ metadata={"description": datasource_name_description},
)
schema = fields.String(
- description="Datasource schema",
+ metadata={"description": "Datasource schema"},
)
datasource_type = fields.String(
- description=datasource_type_description,
+ metadata={"description": datasource_type_description},
validate=validate.OneOf(choices=[ds.value for ds in DatasourceType]),
required=True,
)
@@ -45,9 +45,9 @@ class Datasource(Schema):
class CacheInvalidationRequestSchema(Schema):
datasource_uids = fields.List(
fields.String(),
- description=datasource_uid_description,
+ metadata={"description": datasource_uid_description},
)
datasources = fields.List(
fields.Nested(Datasource),
- description="A list of the data source and database names",
+ metadata={"description": "A list of the data source and database names"},
)
diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py
index 5c345b8ea2..e70dbad476 100644
--- a/superset/charts/schemas.py
+++ b/superset/charts/schemas.py
@@ -160,18 +160,20 @@ class ChartEntityResponseSchema(Schema):
Schema for a chart object
"""
- id = fields.Integer(description=id_description)
- slice_name = fields.String(description=slice_name_description)
- cache_timeout = fields.Integer(description=cache_timeout_description)
- changed_on = fields.String(description=changed_on_description)
- description = fields.String(description=description_description)
+ id = fields.Integer(metadata={"description": id_description})
+ slice_name = fields.String(metadata={"description": slice_name_description})
+ cache_timeout = fields.Integer(metadata={"description": cache_timeout_description})
+ changed_on = fields.String(metadata={"description": changed_on_description})
+ description = fields.String(metadata={"description": description_description})
description_markeddown = fields.String(
- description=description_markeddown_description
+ metadata={"description": description_markeddown_description}
+ )
+ form_data = fields.Dict(metadata={"description": form_data_description})
+ slice_url = fields.String(metadata={"description": slice_url_description})
+ certified_by = fields.String(metadata={"description": certified_by_description})
+ certification_details = fields.String(
+ metadata={"description": certification_details_description}
)
- form_data = fields.Dict(description=form_data_description)
- slice_url = fields.String(description=slice_url_description)
- certified_by = fields.String(description=certified_by_description)
- certification_details = fields.String(description=certification_details_description)
class ChartPostSchema(Schema):
@@ -180,44 +182,58 @@ class ChartPostSchema(Schema):
"""
slice_name = fields.String(
- description=slice_name_description, required=True, validate=Length(1, 250)
+ metadata={"description": slice_name_description},
+ required=True,
+ validate=Length(1, 250),
+ )
+ description = fields.String(
+ metadata={"description": description_description}, allow_none=True
)
- description = fields.String(description=description_description, allow_none=True)
viz_type = fields.String(
- description=viz_type_description,
+ metadata={
+ "description": viz_type_description,
+ "example": ["bar", "line_multi", "area", "table"],
+ },
validate=Length(0, 250),
- example=["bar", "line_multi", "area", "table"],
)
- owners = fields.List(fields.Integer(description=owners_description))
+ owners = fields.List(fields.Integer(metadata={"description": owners_description}))
params = fields.String(
- description=params_description, allow_none=True, validate=utils.validate_json
+ metadata={"description": params_description},
+ allow_none=True,
+ validate=utils.validate_json,
)
query_context = fields.String(
- description=query_context_description,
+ metadata={"description": query_context_description},
allow_none=True,
validate=utils.validate_json,
)
query_context_generation = fields.Boolean(
- description=query_context_generation_description, allow_none=True
+ metadata={"description": query_context_generation_description}, allow_none=True
)
cache_timeout = fields.Integer(
- description=cache_timeout_description, allow_none=True
+ metadata={"description": cache_timeout_description}, allow_none=True
+ )
+ datasource_id = fields.Integer(
+ metadata={"description": datasource_id_description}, required=True
)
- datasource_id = fields.Integer(description=datasource_id_description, required=True)
datasource_type = fields.String(
- description=datasource_type_description,
+ metadata={"description": datasource_type_description},
validate=validate.OneOf(choices=[ds.value for ds in DatasourceType]),
required=True,
)
datasource_name = fields.String(
- description=datasource_name_description, allow_none=True
+ metadata={"description": datasource_name_description}, allow_none=True
+ )
+ dashboards = fields.List(
+ fields.Integer(metadata={"description": dashboards_description})
+ )
+ certified_by = fields.String(
+ metadata={"description": certified_by_description}, allow_none=True
)
- dashboards = fields.List(fields.Integer(description=dashboards_description))
- certified_by = fields.String(description=certified_by_description, allow_none=True)
certification_details = fields.String(
- description=certification_details_description, allow_none=True
+ metadata={"description": certification_details_description}, allow_none=True
)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
@@ -227,70 +243,87 @@ class ChartPutSchema(Schema):
"""
slice_name = fields.String(
- description=slice_name_description, allow_none=True, validate=Length(0, 250)
+ metadata={"description": slice_name_description},
+ allow_none=True,
+ validate=Length(0, 250),
+ )
+ description = fields.String(
+ metadata={"description": description_description}, allow_none=True
)
- description = fields.String(description=description_description, allow_none=True)
viz_type = fields.String(
- description=viz_type_description,
+ metadata={
+ "description": viz_type_description,
+ "example": ["bar", "line_multi", "area", "table"],
+ },
allow_none=True,
validate=Length(0, 250),
- example=["bar", "line_multi", "area", "table"],
)
- owners = fields.List(fields.Integer(description=owners_description))
- params = fields.String(description=params_description, allow_none=True)
+ owners = fields.List(fields.Integer(metadata={"description": owners_description}))
+ params = fields.String(
+ metadata={"description": params_description}, allow_none=True
+ )
query_context = fields.String(
- description=query_context_description, allow_none=True
+ metadata={"description": query_context_description}, allow_none=True
)
query_context_generation = fields.Boolean(
- description=query_context_generation_description, allow_none=True
+ metadata={"description": query_context_generation_description}, allow_none=True
)
cache_timeout = fields.Integer(
- description=cache_timeout_description, allow_none=True
+ metadata={"description": cache_timeout_description}, allow_none=True
)
datasource_id = fields.Integer(
- description=datasource_id_description, allow_none=True
+ metadata={"description": datasource_id_description}, allow_none=True
)
datasource_type = fields.String(
- description=datasource_type_description,
+ metadata={"description": datasource_type_description},
validate=validate.OneOf(choices=[ds.value for ds in DatasourceType]),
allow_none=True,
)
- dashboards = fields.List(fields.Integer(description=dashboards_description))
- certified_by = fields.String(description=certified_by_description, allow_none=True)
+ dashboards = fields.List(
+ fields.Integer(metadata={"description": dashboards_description})
+ )
+ certified_by = fields.String(
+ metadata={"description": certified_by_description}, allow_none=True
+ )
certification_details = fields.String(
- description=certification_details_description, allow_none=True
+ metadata={"description": certification_details_description}, allow_none=True
)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
class ChartGetDatasourceObjectDataResponseSchema(Schema):
- datasource_id = fields.Integer(description="The datasource identifier")
- datasource_type = fields.Integer(description="The datasource type")
+ datasource_id = fields.Integer(
+ metadata={"description": "The datasource identifier"}
+ )
+ datasource_type = fields.Integer(metadata={"description": "The datasource type"})
class ChartGetDatasourceObjectResponseSchema(Schema):
- label = fields.String(description="The name of the datasource")
+ label = fields.String(metadata={"description": "The name of the datasource"})
value = fields.Nested(ChartGetDatasourceObjectDataResponseSchema)
class ChartGetDatasourceResponseSchema(Schema):
- count = fields.Integer(description="The total number of datasources")
+ count = fields.Integer(metadata={"description": "The total number of datasources"})
result = fields.Nested(ChartGetDatasourceObjectResponseSchema)
class ChartCacheScreenshotResponseSchema(Schema):
- cache_key = fields.String(description="The cache key")
- chart_url = fields.String(description="The url to render the chart")
- image_url = fields.String(description="The url to fetch the screenshot")
+ cache_key = fields.String(metadata={"description": "The cache key"})
+ chart_url = fields.String(metadata={"description": "The url to render the chart"})
+ image_url = fields.String(
+ metadata={"description": "The url to fetch the screenshot"}
+ )
class ChartDataColumnSchema(Schema):
column_name = fields.String(
- description="The name of the target column",
- example="mycol",
+ metadata={"description": "The name of the target column", "example": "mycol"},
+ )
+ type = fields.String(
+ metadata={"description": "Type of target column", "example": "BIGINT"}
)
- type = fields.String(description="Type of target column", example="BIGINT")
class ChartDataAdhocMetricSchema(Schema):
@@ -299,71 +332,86 @@ class ChartDataAdhocMetricSchema(Schema):
"""
expressionType = fields.String(
- description="Simple or SQL metric",
+ metadata={"description": "Simple or SQL metric", "example": "SQL"},
required=True,
validate=validate.OneOf(choices=("SIMPLE", "SQL")),
- example="SQL",
)
aggregate = fields.String(
- description="Aggregation operator. Only required for simple expression types.",
+ metadata={
+ "description": "Aggregation operator."
+ "Only required for simple expression types."
+ },
validate=validate.OneOf(
choices=("AVG", "COUNT", "COUNT_DISTINCT", "MAX", "MIN", "SUM")
),
)
column = fields.Nested(ChartDataColumnSchema)
sqlExpression = fields.String(
- description="The metric as defined by a SQL aggregate expression. "
- "Only required for SQL expression type.",
- example="SUM(weight * observations) / SUM(weight)",
+ metadata={
+ "description": "The metric as defined by a SQL aggregate expression. "
+ "Only required for SQL expression type.",
+ "example": "SUM(weight * observations) / SUM(weight)",
+ },
)
label = fields.String(
- description="Label for the metric. Is automatically generated unless "
- "hasCustomLabel is true, in which case label must be defined.",
- example="Weighted observations",
+ metadata={
+ "description": "Label for the metric. Is automatically generated unless"
+ "hasCustomLabel is true, in which case label must be defined.",
+ "example": "Weighted observations",
+ },
)
hasCustomLabel = fields.Boolean(
- description="When false, the label will be automatically generated based on "
- "the aggregate expression. When true, a custom label has to be "
- "specified.",
- example=True,
+ metadata={
+ "description": "When false, the label will be automatically generated based "
+ "on the aggregate expression. When true, a custom label has to be specified.",
+ "example": True,
+ },
)
optionName = fields.String(
- description="Unique identifier. Can be any string value, as long as all "
- "metrics have a unique identifier. If undefined, a random name "
- "will be generated.",
- example="metric_aec60732-fac0-4b17-b736-93f1a5c93e30",
+ metadata={
+ "description": "Unique identifier. Can be any string value, as long as all "
+ "metrics have a unique identifier. If undefined, a random name"
+ "will be generated.",
+ "example": "metric_aec60732-fac0-4b17-b736-93f1a5c93e30",
+ },
)
timeGrain = fields.String(
- description="Optional time grain for temporal filters",
- example="PT1M",
+ metadata={
+ "description": "Optional time grain for temporal filters",
+ "example": "PT1M",
+ },
)
isExtra = fields.Boolean(
- description="Indicates if the filter has been added by a filter component as "
- "opposed to being a part of the original query."
+ metadata={
+ "description": "Indicates if the filter has been added by a filter component "
+ "as opposed to being a part of the original query."
+ }
)
class ChartDataAggregateConfigField(fields.Dict):
def __init__(self) -> None:
super().__init__(
- description="The keys are the name of the aggregate column to be created, "
- "and the values specify the details of how to apply the "
- "aggregation. If an operator requires additional options, "
- "these can be passed here to be unpacked in the operator call. The "
- "following numpy operators are supported: average, argmin, argmax, cumsum, "
- "cumprod, max, mean, median, nansum, nanmin, nanmax, nanmean, nanmedian, "
- "min, percentile, prod, product, std, sum, var. Any options required by "
- "the operator can be passed to the `options` object.\n"
- "\n"
- "In the example, a new column `first_quantile` is created based on values "
- "in the column `my_col` using the `percentile` operator with "
- "the `q=0.25` parameter.",
- example={
- "first_quantile": {
- "operator": "percentile",
- "column": "my_col",
- "options": {"q": 0.25},
- }
+ metadata={
+ "description": "The keys are the name of the aggregate column to be "
+ "created, and the values specify the details of how to apply the "
+ "aggregation. If an operator requires additional options, "
+ "these can be passed here to be unpacked in the operator call. The "
+ "following numpy operators are supported: average, argmin, argmax, "
+ "cumsum, cumprod, max, mean, median, nansum, nanmin, nanmax, nanmean, "
+ "nanmedian, min, percentile, prod, product, std, sum, var. Any options "
+ "required by the operator can be passed to the `options` object.\n"
+ "\n"
+ "In the example, a new column `first_quantile` is created based on "
+ "values in the column `my_col` using the `percentile` operator with "
+ "the `q=0.25` parameter.",
+ "example": {
+ "first_quantile": {
+ "operator": "percentile",
+ "column": "my_col",
+ "options": {"q": 0.25},
+ }
+ },
},
)
@@ -381,7 +429,7 @@ class ChartDataAggregateOptionsSchema(ChartDataPostProcessingOperationOptionsSch
fields.List(
fields.String(
allow_none=False,
- description="Columns by which to group by",
+ metadata={"description": "Columns by which to group by"},
),
minLength=1,
required=True,
@@ -397,16 +445,21 @@ class ChartDataRollingOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
columns = (
fields.Dict(
- description="columns on which to perform rolling, mapping source column to "
- "target column. For instance, `{'y': 'y'}` will replace the "
- "column `y` with the rolling value in `y`, while `{'y': 'y2'}` "
- "will add a column `y2` based on rolling values calculated "
- "from `y`, leaving the original column `y` unchanged.",
- example={"weekly_rolling_sales": "sales"},
+ metadata={
+ "description": "columns on which to perform rolling, mapping source "
+ "column to target column. For instance, `{'y': 'y'}` will replace the "
+ "column `y` with the rolling value in `y`, while `{'y': 'y2'}` will add "
+ "a column `y2` based on rolling values calculated from `y`, leaving the "
+ "original column `y` unchanged.",
+ "example": {"weekly_rolling_sales": "sales"},
+ },
),
)
rolling_type = fields.String(
- description="Type of rolling window. Any numpy function will work.",
+ metadata={
+ "description": "Type of rolling window. Any numpy function will work.",
+ "example": "percentile",
+ },
validate=validate.OneOf(
choices=(
"average",
@@ -433,29 +486,35 @@ class ChartDataRollingOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
)
),
required=True,
- example="percentile",
)
window = fields.Integer(
- description="Size of the rolling window in days.",
+ metadata={"description": "Size of the rolling window in days.", "example": 7},
required=True,
- example=7,
)
rolling_type_options = fields.Dict(
- description="Optional options to pass to rolling method. Needed for "
- "e.g. quantile operation.",
- example={},
+ metadata={
+ "description": "Optional options to pass to rolling method. Needed for "
+ "e.g. quantile operation.",
+ "example": {},
+ },
)
center = fields.Boolean(
- description="Should the label be at the center of the window. Default: `false`",
- example=False,
+ metadata={
+ "description": "Should the label be at the center of the window."
+ "Default: `false`",
+ "example": False,
+ },
)
win_type = fields.String(
- description="Type of window function. See "
- "[SciPy window functions](https://docs.scipy.org/doc/scipy/reference"
- "/signal.windows.html#module-scipy.signal.windows) "
- "for more details. Some window functions require passing "
- "additional parameters to `rolling_type_options`. For instance, "
- "to use `gaussian`, the parameter `std` needs to be provided.",
+ metadata={
+ "description": "Type of window function. See "
+ "[SciPy window functions](https://docs.scipy.org/doc/scipy/reference "
+ "/signal.windows.html#module-scipy.signal.windows) "
+ "for more details. Some window functions require passing "
+ "additional parameters to `rolling_type_options`. For instance, "
+ "to use `gaussian`, the parameter `std` needs to be provided."
+ ""
+ },
validate=validate.OneOf(
choices=(
"boxcar",
@@ -477,9 +536,11 @@ class ChartDataRollingOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
),
)
min_periods = fields.Integer(
- description="The minimum amount of periods required for a row to be included "
- "in the result set.",
- example=7,
+ metadata={
+ "description": "The minimum amount of periods required for a row to be "
+ "included in the result set.",
+ "example": 7,
+ },
)
@@ -490,21 +551,27 @@ class ChartDataSelectOptionsSchema(ChartDataPostProcessingOperationOptionsSchema
columns = fields.List(
fields.String(),
- description="Columns which to select from the input data, in the desired "
- "order. If columns are renamed, the original column name should be "
- "referenced here.",
- example=["country", "gender", "age"],
+ metadata={
+ "description": "Columns which to select from the input data, in the desired "
+ "order. If columns are renamed, the original column name should be "
+ "referenced here.",
+ "example": ["country", "gender", "age"],
+ },
)
exclude = fields.List(
fields.String(),
- description="Columns to exclude from selection.",
- example=["my_temp_column"],
+ metadata={
+ "description": "Columns to exclude from selection.",
+ "example": ["my_temp_column"],
+ },
)
rename = fields.List(
fields.Dict(),
- description="columns which to rename, mapping source column to target column. "
- "For instance, `{'y': 'y2'}` will rename the column `y` to `y2`.",
- example=[{"age": "average_age"}],
+ metadata={
+ "description": "columns which to rename, mapping source column to target "
+ "column. For instance, `{'y': 'y2'}` will rename the column `y` to `y2`.",
+ "example": [{"age": "average_age"}],
+ },
)
@@ -514,9 +581,11 @@ class ChartDataSortOptionsSchema(ChartDataPostProcessingOperationOptionsSchema):
"""
columns = fields.Dict(
- description="columns by by which to sort. The key specifies the column name, "
- "value specifies if sorting in ascending order.",
- example={"country": True, "gender": False},
+ metadata={
+ "description": "columns by by which to sort. The key specifies the column "
+ "name, value specifies if sorting in ascending order.",
+ "example": {"country": True, "gender": False},
+ },
required=True,
)
aggregates = ChartDataAggregateConfigField()
@@ -528,12 +597,14 @@ class ChartDataContributionOptionsSchema(ChartDataPostProcessingOperationOptions
"""
orientation = fields.String(
- description="Should cell values be calculated across the row or column.",
+ metadata={
+ "description": "Should cell values be calculated across the row or column.",
+ "example": "row",
+ },
required=True,
validate=validate.OneOf(
choices=[val.value for val in PostProcessingContributionOrientation]
),
- example="row",
)
@@ -543,9 +614,12 @@ class ChartDataProphetOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
"""
time_grain = fields.String(
- description="Time grain used to specify time period increments in prediction. "
- "Supports [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) "
- "durations.",
+ metadata={
+ "description": "Time grain used to specify time period increments in "
+ "prediction. Supports "
+ "[ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) durations.",
+ "example": "P1D",
+ },
validate=validate.OneOf(
choices=[
i
@@ -553,17 +627,22 @@ class ChartDataProphetOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
if i
]
),
- example="P1D",
required=True,
)
periods = fields.Integer(
- description="Time periods (in units of `time_grain`) to predict into the future",
+ metadata={
+ "description": "Time periods (in units of `time_grain`) to predict into "
+ "the future",
+ "example": 7,
+ },
min=0,
- example=7,
required=True,
)
confidence_interval = fields.Float(
- description="Width of predicted confidence interval",
+ metadata={
+ "description": "Width of predicted confidence interval",
+ "example": 0.8,
+ },
validate=[
Range(
min=0,
@@ -573,29 +652,34 @@ class ChartDataProphetOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
error=_("`confidence_interval` must be between 0 and 1 (exclusive)"),
)
],
- example=0.8,
required=True,
)
yearly_seasonality = fields.Raw(
# TODO: add correct union type once supported by Marshmallow
- description="Should yearly seasonality be applied. "
- "An integer value will specify Fourier order of seasonality, `None` will "
- "automatically detect seasonality.",
- example=False,
+ metadata={
+ "description": "Should yearly seasonality be applied. "
+ "An integer value will specify Fourier order of seasonality, `None` will "
+ "automatically detect seasonality.",
+ "example": False,
+ },
)
weekly_seasonality = fields.Raw(
# TODO: add correct union type once supported by Marshmallow
- description="Should weekly seasonality be applied. "
- "An integer value will specify Fourier order of seasonality, `None` will "
- "automatically detect seasonality.",
- example=False,
+ metadata={
+ "description": "Should weekly seasonality be applied. "
+ "An integer value will specify Fourier order of seasonality, `None` will "
+ "automatically detect seasonality.",
+ "example": False,
+ },
)
monthly_seasonality = fields.Raw(
# TODO: add correct union type once supported by Marshmallow
- description="Should monthly seasonality be applied. "
- "An integer value will specify Fourier order of seasonality, `None` will "
- "automatically detect seasonality.",
- example=False,
+ metadata={
+ "description": "Should monthly seasonality be applied. "
+ "An integer value will specify Fourier order of seasonality, `None` will "
+ "automatically detect seasonality.",
+ "example": False,
+ },
)
@@ -606,36 +690,40 @@ class ChartDataBoxplotOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
groupby = fields.List(
fields.String(
- description="Columns by which to group the query.",
+ metadata={"description": "Columns by which to group the query."},
),
allow_none=True,
)
metrics = fields.List(
fields.Raw(),
- description="Aggregate expressions. Metrics can be passed as both "
- "references to datasource metrics (strings), or ad-hoc metrics"
- "which are defined only within the query object. See "
- "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics. "
- "When metrics is undefined or null, the query is executed without a groupby. "
- "However, when metrics is an array (length >= 0), a groupby clause is added to "
- "the query.",
+ metadata={
+ "description": "Aggregate expressions. Metrics can be passed as both "
+ "references to datasource metrics (strings), or ad-hoc metrics"
+ "which are defined only within the query object. See "
+ "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics. "
+ "When metrics is undefined or null, the query is executed without a groupby. "
+ "However, when metrics is an array (length >= 0), a groupby clause is added "
+ "to the query."
+ },
allow_none=True,
)
whisker_type = fields.String(
- description="Whisker type. Any numpy function will work.",
+ metadata={
+ "description": "Whisker type. Any numpy function will work.",
+ "example": "tukey",
+ },
validate=validate.OneOf(
choices=([val.value for val in PostProcessingBoxplotWhiskerType])
),
required=True,
- example="tukey",
)
percentiles = fields.Tuple(
(
fields.Float(
- description="Lower percentile",
+ metadata={"description": "Lower percentile"},
validate=[
Range(
min=0,
@@ -650,7 +738,7 @@ class ChartDataBoxplotOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
],
),
fields.Float(
- description="Upper percentile",
+ metadata={"description": "Upper percentile"},
validate=[
Range(
min=0,
@@ -665,8 +753,10 @@ class ChartDataBoxplotOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
],
),
),
- description="Upper and lower percentiles for percentile whisker type.",
- example=[1, 99],
+ metadata={
+ "description": "Upper and lower percentiles for percentile whisker type.",
+ "example": [1, 99],
+ },
)
@@ -678,30 +768,38 @@ class ChartDataPivotOptionsSchema(ChartDataPostProcessingOperationOptionsSchema)
index = (
fields.List(
fields.String(allow_none=False),
- description="Columns to group by on the table index (=rows)",
+ metadata={"description": "Columns to group by on the table index (=rows)"},
minLength=1,
required=True,
),
)
columns = fields.List(
fields.String(allow_none=False),
- description="Columns to group by on the table columns",
+ metadata={"description": "Columns to group by on the table columns"},
)
metric_fill_value = fields.Number(
- description="Value to replace missing values with in aggregate calculations.",
+ metadata={
+ "description": "Value to replace missing values with in "
+ "aggregate calculations."
+ },
)
column_fill_value = fields.String(
- description="Value to replace missing pivot columns names with."
+ metadata={"description": "Value to replace missing pivot columns names with."}
)
drop_missing_columns = fields.Boolean(
- description="Do not include columns whose entries are all missing "
- "(default: `true`).",
+ metadata={
+ "description": "Do not include columns whose entries are all missing "
+ "(default: `true`)."
+ },
)
marginal_distributions = fields.Boolean(
- description="Add totals for row/column. (default: `false`)",
+ metadata={"description": "Add totals for row/column. (default: `false`)"},
)
marginal_distribution_name = fields.String(
- description="Name of marginal distribution row/column. (default: `All`)",
+ metadata={
+ "description": "Name of marginal distribution row/column. "
+ "(default: `All`)"
+ },
)
aggregates = ChartDataAggregateConfigField()
@@ -714,15 +812,15 @@ class ChartDataGeohashDecodeOptionsSchema(
"""
geohash = fields.String(
- description="Name of source column containing geohash string",
+ metadata={"description": "Name of source column containing geohash string"},
required=True,
)
latitude = fields.String(
- description="Name of target column for decoded latitude",
+ metadata={"description": "Name of target column for decoded latitude"},
required=True,
)
longitude = fields.String(
- description="Name of target column for decoded longitude",
+ metadata={"description": "Name of target column for decoded longitude"},
required=True,
)
@@ -735,15 +833,15 @@ class ChartDataGeohashEncodeOptionsSchema(
"""
latitude = fields.String(
- description="Name of source latitude column",
+ metadata={"description": "Name of source latitude column"},
required=True,
)
longitude = fields.String(
- description="Name of source longitude column",
+ metadata={"description": "Name of source longitude column"},
required=True,
)
geohash = fields.String(
- description="Name of target column for encoded geohash string",
+ metadata={"description": "Name of target column for encoded geohash string"},
required=True,
)
@@ -756,26 +854,33 @@ class ChartDataGeodeticParseOptionsSchema(
"""
geodetic = fields.String(
- description="Name of source column containing geodetic point strings",
+ metadata={
+ "description": "Name of source column containing geodetic point strings"
+ },
required=True,
)
latitude = fields.String(
- description="Name of target column for decoded latitude",
+ metadata={"description": "Name of target column for decoded latitude"},
required=True,
)
longitude = fields.String(
- description="Name of target column for decoded longitude",
+ metadata={"description": "Name of target column for decoded longitude"},
required=True,
)
altitude = fields.String(
- description="Name of target column for decoded altitude. If omitted, "
- "altitude information in geodetic string is ignored.",
+ metadata={
+ "description": "Name of target column for decoded altitude. If omitted, "
+ "altitude information in geodetic string is ignored."
+ },
)
class ChartDataPostProcessingOperationSchema(Schema):
operation = fields.String(
- description="Post processing operation type",
+ metadata={
+ "description": "Post processing operation type",
+ "example": "aggregate",
+ },
required=True,
validate=validate.OneOf(
choices=[
@@ -785,24 +890,25 @@ class ChartDataPostProcessingOperationSchema(Schema):
)
]
),
- example="aggregate",
)
options = fields.Dict(
- description="Options specifying how to perform the operation. Please refer "
- "to the respective post processing operation option schemas. "
- "For example, `ChartDataPostProcessingOperationOptions` specifies "
- "the required options for the pivot operation.",
- example={
- "groupby": ["country", "gender"],
- "aggregates": {
- "age_q1": {
- "operator": "percentile",
- "column": "age",
- "options": {"q": 0.25},
- },
- "age_mean": {
- "operator": "mean",
- "column": "age",
+ metadata={
+ "description": "Options specifying how to perform the operation. Please "
+ "refer to the respective post processing operation option schemas. "
+ "For example, `ChartDataPostProcessingOperationOptions` specifies "
+ "the required options for the pivot operation.",
+ "example": {
+ "groupby": ["country", "gender"],
+ "aggregates": {
+ "age_q1": {
+ "operator": "percentile",
+ "column": "age",
+ "options": {"q": 0.25},
+ },
+ "age_mean": {
+ "operator": "mean",
+ "column": "age",
+ },
},
},
},
@@ -811,63 +917,83 @@ class ChartDataPostProcessingOperationSchema(Schema):
class ChartDataFilterSchema(Schema):
col = fields.Raw(
- description="The column to filter by. Can be either a string (physical or "
- "saved expression) or an object (adhoc column)",
+ metadata={
+ "description": "The column to filter by. Can be either a string (physical or "
+ "saved expression) or an object (adhoc column)",
+ "example": "country",
+ },
required=True,
- example="country",
)
op = fields.String( # pylint: disable=invalid-name
- description="The comparison operator.",
+ metadata={"description": "The comparison operator.", "example": "IN"},
validate=utils.OneOfCaseInsensitive(
choices=[filter_op.value for filter_op in FilterOperator]
),
required=True,
- example="IN",
)
val = fields.Raw(
- description="The value or values to compare against. Can be a string, "
- "integer, decimal, None or list, depending on the operator.",
+ metadata={
+ "description": "The value or values to compare against. Can be a string, "
+ "integer, decimal, None or list, depending on the operator.",
+ "example": ["China", "France", "Japan"],
+ },
allow_none=True,
- example=["China", "France", "Japan"],
)
grain = fields.String(
- description="Optional time grain for temporal filters",
- example="PT1M",
+ metadata={
+ "description": "Optional time grain for temporal filters",
+ "example": "PT1M",
+ },
)
isExtra = fields.Boolean(
- description="Indicates if the filter has been added by a filter component as "
- "opposed to being a part of the original query."
+ metadata={
+ "description": "Indicates if the filter has been added by a filter "
+ "component as opposed to being a part of the original query."
+ }
)
class ChartDataExtrasSchema(Schema):
relative_start = fields.String(
- description="Start time for relative time deltas. "
- 'Default: `config["DEFAULT_RELATIVE_START_TIME"]`',
+ metadata={
+ "description": "Start time for relative time deltas. "
+ 'Default: `config["DEFAULT_RELATIVE_START_TIME"]`'
+ },
validate=validate.OneOf(choices=("today", "now")),
)
relative_end = fields.String(
- description="End time for relative time deltas. "
- 'Default: `config["DEFAULT_RELATIVE_START_TIME"]`',
+ metadata={
+ "description": "End time for relative time deltas. "
+ 'Default: `config["DEFAULT_RELATIVE_START_TIME"]`'
+ },
validate=validate.OneOf(choices=("today", "now")),
)
where = fields.String(
- description="WHERE clause to be added to queries using AND operator.",
+ metadata={
+ "description": "WHERE clause to be added to queries using AND operator."
+ },
)
having = fields.String(
- description="HAVING clause to be added to aggregate queries using "
- "AND operator.",
+ metadata={
+ "description": "HAVING clause to be added to aggregate queries using "
+ "AND operator."
+ },
)
having_druid = fields.List(
fields.Nested(ChartDataFilterSchema),
- description="HAVING filters to be added to legacy Druid datasource queries. "
- "This field is deprecated",
- deprecated=True,
+ metadata={
+ "description": "HAVING filters to be added to legacy Druid datasource "
+ "queries. This field is deprecated",
+ "deprecated": True,
+ },
)
time_grain_sqla = fields.String(
- description="To what level of granularity should the temporal column be "
- "aggregated. Supports "
- "[ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) durations.",
+ metadata={
+ "description": "To what level of granularity should the temporal column be "
+ "aggregated. Supports "
+ "[ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) durations.",
+ "example": "P1D",
+ },
validate=validate.OneOf(
choices=[
i
@@ -875,38 +1001,42 @@ class ChartDataExtrasSchema(Schema):
if i
]
),
- example="P1D",
allow_none=True,
)
class AnnotationLayerSchema(Schema):
annotationType = fields.String(
- description="Type of annotation layer",
+ metadata={"description": "Type of annotation layer"},
validate=validate.OneOf(choices=[ann.value for ann in AnnotationType]),
)
color = fields.String(
- description="Layer color",
+ metadata={"description": "Layer color"},
allow_none=True,
)
descriptionColumns = fields.List(
fields.String(),
- description="Columns to use as the description. If none are provided, "
- "all will be shown.",
+ metadata={
+ "description": "Columns to use as the description. If none are provided, "
+ "all will be shown."
+ },
)
hideLine = fields.Boolean(
- description="Should line be hidden. Only applies to line annotations",
+ metadata={
+ "description": "Should line be hidden. Only applies to line annotations"
+ },
allow_none=True,
)
intervalEndColumn = fields.String(
- description=(
- "Column containing end of interval. Only applies to interval layers"
- ),
+ metadata={
+ "description": "Column containing end of interval. "
+ "Only applies to interval layers"
+ },
allow_none=True,
)
- name = fields.String(description="Name of layer", required=True)
+ name = fields.String(metadata={"description": "Name of layer"}, required=True)
opacity = fields.String(
- description="Opacity of layer",
+ metadata={"description": "Opacity of layer"},
validate=validate.OneOf(
choices=("", "opacityLow", "opacityMedium", "opacityHigh"),
),
@@ -915,26 +1045,30 @@ class AnnotationLayerSchema(Schema):
)
overrides = fields.Dict(
keys=fields.String(
- description="Name of property to be overridden",
+ metadata={"description": "Name of property to be overridden"},
validate=validate.OneOf(
choices=("granularity", "time_grain_sqla", "time_range", "time_shift"),
),
),
values=fields.Raw(allow_none=True),
- description="which properties should be overridable",
+ metadata={"description": "which properties should be overridable"},
allow_none=True,
)
- show = fields.Boolean(description="Should the layer be shown", required=True)
+ show = fields.Boolean(
+ metadata={"description": "Should the layer be shown"}, required=True
+ )
showLabel = fields.Boolean(
- description="Should the label always be shown",
+ metadata={"description": "Should the label always be shown"},
allow_none=True,
)
showMarkers = fields.Boolean(
- description="Should markers be shown. Only applies to line annotations.",
+ metadata={
+ "description": "Should markers be shown. Only applies to line annotations."
+ },
required=True,
)
sourceType = fields.String(
- description="Type of source for annotation data",
+ metadata={"description": "Type of source for annotation data"},
validate=validate.OneOf(
choices=(
"",
@@ -945,7 +1079,7 @@ class AnnotationLayerSchema(Schema):
),
)
style = fields.String(
- description="Line style. Only applies to time-series annotations",
+ metadata={"description": "Line style. Only applies to time-series annotations"},
validate=validate.OneOf(
choices=(
"dashed",
@@ -956,15 +1090,15 @@ class AnnotationLayerSchema(Schema):
),
)
timeColumn = fields.String(
- description="Column with event date or interval start date",
+ metadata={"description": "Column with event date or interval start date"},
allow_none=True,
)
titleColumn = fields.String(
- description="Column with title",
+ metadata={"description": "Column with title"},
allow_none=True,
)
width = fields.Float(
- description="Width of annotation line",
+ metadata={"description": "Width of annotation line"},
validate=[
Range(
min=0,
@@ -974,8 +1108,10 @@ class AnnotationLayerSchema(Schema):
],
)
value = fields.Raw(
- description="For formula annotations, this contains the formula. "
- "For other types, this is the primary key of the source object.",
+ metadata={
+ "description": "For formula annotations, this contains the formula. "
+ "For other types, this is the primary key of the source object."
+ },
required=True,
)
@@ -983,11 +1119,11 @@ class AnnotationLayerSchema(Schema):
class ChartDataDatasourceSchema(Schema):
description = "Chart datasource"
id = fields.Integer(
- description="Datasource id",
+ metadata={"description": "Datasource id"},
required=True,
)
type = fields.String(
- description="Datasource type",
+ metadata={"description": "Datasource type"},
validate=validate.OneOf(choices=[ds.value for ds in DatasourceType]),
)
@@ -1001,138 +1137,170 @@ class ChartDataQueryObjectSchema(Schema):
annotation_layers = fields.List(
fields.Nested(AnnotationLayerSchema),
- description="Annotation layers to apply to chart",
+ metadata={"description": "Annotation layers to apply to chart"},
allow_none=True,
)
applied_time_extras = fields.Dict(
- description="A mapping of temporal extras that have been applied to the query",
+ metadata={
+ "description": "A mapping of temporal extras that have been applied to "
+ "the query",
+ "example": {"__time_range": "1 year ago : now"},
+ },
allow_none=True,
- example={"__time_range": "1 year ago : now"},
)
apply_fetch_values_predicate = fields.Boolean(
- description="Add fetch values predicate (where clause) to query "
- "if defined in datasource",
+ metadata={
+ "description": "Add fetch values predicate (where clause) to query "
+ "if defined in datasource"
+ },
allow_none=True,
)
filters = fields.List(fields.Nested(ChartDataFilterSchema), allow_none=True)
granularity = fields.String(
- description="Name of temporal column used for time filtering. For legacy Druid "
- "datasources this defines the time grain.",
+ metadata={
+ "description": "Name of temporal column used for time filtering. "
+ "For legacy Druid datasources this defines the time grain."
+ },
allow_none=True,
)
granularity_sqla = fields.String(
- description="Name of temporal column used for time filtering for SQL "
- "datasources. This field is deprecated, use `granularity` "
- "instead.",
+ metadata={
+ "description": "Name of temporal column used for time filtering for SQL "
+ "datasources. This field is deprecated, use `granularity` "
+ "instead.",
+ "deprecated": True,
+ },
allow_none=True,
- deprecated=True,
)
groupby = fields.List(
fields.Raw(),
- description="Columns by which to group the query. "
- "This field is deprecated, use `columns` instead.",
+ metadata={
+ "description": "Columns by which to group the query. "
+ "This field is deprecated, use `columns` instead."
+ },
allow_none=True,
)
metrics = fields.List(
fields.Raw(),
- description="Aggregate expressions. Metrics can be passed as both "
- "references to datasource metrics (strings), or ad-hoc metrics"
- "which are defined only within the query object. See "
- "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.",
+ metadata={
+ "description": "Aggregate expressions. Metrics can be passed as both "
+ "references to datasource metrics (strings), or ad-hoc metrics "
+ "which are defined only within the query object. See "
+ "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics."
+ },
allow_none=True,
)
post_processing = fields.List(
fields.Nested(ChartDataPostProcessingOperationSchema, allow_none=True),
allow_none=True,
- description="Post processing operations to be applied to the result set. "
- "Operations are applied to the result set in sequential order.",
+ metadata={
+ "description": "Post processing operations to be applied to the result set. "
+ "Operations are applied to the result set in sequential order."
+ },
)
time_range = fields.String(
- description="A time rage, either expressed as a colon separated string "
- "`since : until` or human readable freeform. Valid formats for "
- "`since` and `until` are: \n"
- "- ISO 8601\n"
- "- X days/years/hours/day/year/weeks\n"
- "- X days/years/hours/day/year/weeks ago\n"
- "- X days/years/hours/day/year/weeks from now\n"
- "\n"
- "Additionally, the following freeform can be used:\n"
- "\n"
- "- Last day\n"
- "- Last week\n"
- "- Last month\n"
- "- Last quarter\n"
- "- Last year\n"
- "- No filter\n"
- "- Last X seconds/minutes/hours/days/weeks/months/years\n"
- "- Next X seconds/minutes/hours/days/weeks/months/years\n",
- example="Last week",
+ metadata={
+ "description": "A time range, either expressed as a colon separated string "
+ "`since : until` or human readable freeform. Valid formats for "
+ "`since` and `until` are: \n"
+ "- ISO 8601\n"
+ "- X days/years/hours/day/year/weeks\n"
+ "- X days/years/hours/day/year/weeks ago\n"
+ "- X days/years/hours/day/year/weeks from now\n"
+ "\n"
+ "Additionally, the following freeform can be used:\n"
+ "\n"
+ "- Last day\n"
+ "- Last week\n"
+ "- Last month\n"
+ "- Last quarter\n"
+ "- Last year\n"
+ "- No filter\n"
+ "- Last X seconds/minutes/hours/days/weeks/months/years\n"
+ "- Next X seconds/minutes/hours/days/weeks/months/years\n",
+ "example": "Last week",
+ },
allow_none=True,
)
time_shift = fields.String(
- description="A human-readable date/time string. "
- "Please refer to [parsdatetime](https://github.com/bear/parsedatetime) "
- "documentation for details on valid values.",
+ metadata={
+ "description": "A human-readable date/time string. "
+ "Please refer to [parsedatetime](https://github.com/bear/parsedatetime) "
+ "documentation for details on valid values."
+ },
allow_none=True,
)
is_timeseries = fields.Boolean(
- description="Is the `query_object` a timeseries.",
+ metadata={"description": "Is the `query_object` a timeseries."},
allow_none=True,
)
series_columns = fields.List(
fields.Raw(),
- description="Columns to use when limiting series count. "
- "All columns must be present in the `columns` property. "
- "Requires `series_limit` and `series_limit_metric` to be set.",
+ metadata={
+ "description": "Columns to use when limiting series count. "
+ "All columns must be present in the `columns` property. "
+ "Requires `series_limit` and `series_limit_metric` to be set."
+ },
allow_none=True,
)
series_limit = fields.Integer(
- description="Maximum number of series. "
- "Requires `series` and `series_limit_metric` to be set.",
+ metadata={
+ "description": "Maximum number of series. "
+ "Requires `series` and `series_limit_metric` to be set."
+ },
allow_none=True,
)
series_limit_metric = fields.Raw(
- description="Metric used to limit timeseries queries by. "
- "Requires `series` and `series_limit` to be set.",
+ metadata={
+ "description": "Metric used to limit timeseries queries by. "
+ "Requires `series` and `series_limit` to be set."
+ },
allow_none=True,
)
timeseries_limit = fields.Integer(
- description="Maximum row count for timeseries queries. "
- "This field is deprecated, use `series_limit` instead."
- "Default: `0`",
+ metadata={
+ "description": "Maximum row count for timeseries queries. "
+ "This field is deprecated, use `series_limit` instead. "
+ "Default: `0`"
+ },
allow_none=True,
)
timeseries_limit_metric = fields.Raw(
- description="Metric used to limit timeseries queries by. "
- "This field is deprecated, use `series_limit_metric` instead.",
+ metadata={
+ "description": "Metric used to limit timeseries queries by. "
+ "This field is deprecated, use `series_limit_metric` instead."
+ },
allow_none=True,
)
row_limit = fields.Integer(
- description='Maximum row count (0=disabled). Default: `config["ROW_LIMIT"]`',
+ metadata={
+ "description": "Maximum row count (0=disabled). "
+ 'Default: `config["ROW_LIMIT"]`'
+ },
allow_none=True,
validate=[
Range(min=0, error=_("`row_limit` must be greater than or equal to 0"))
],
)
row_offset = fields.Integer(
- description="Number of rows to skip. Default: `0`",
+ metadata={"description": "Number of rows to skip. Default: `0`"},
allow_none=True,
validate=[
Range(min=0, error=_("`row_offset` must be greater than or equal to 0"))
],
)
order_desc = fields.Boolean(
- description="Reverse order. Default: `false`",
+ metadata={"description": "Reverse order. Default: `false`"},
allow_none=True,
)
extras = fields.Nested(
ChartDataExtrasSchema,
- description="Extra parameters to add to the query.",
+ metadata={"description": "Extra parameters to add to the query."},
allow_none=True,
)
columns = fields.List(
fields.Raw(),
- description="Columns which to select in the query.",
+ metadata={"description": "Columns which to select in the query."},
allow_none=True,
)
orderby = fields.List(
@@ -1147,48 +1315,63 @@ class ChartDataQueryObjectSchema(Schema):
fields.Boolean(),
)
),
- description="Expects a list of lists where the first element is the column "
- "name which to sort by, and the second element is a boolean.",
+ metadata={
+ "description": "Expects a list of lists where the first element is the "
+ "column name which to sort by, and the second element is a boolean.",
+ "example": [("my_col_1", False), ("my_col_2", True)],
+ },
allow_none=True,
- example=[("my_col_1", False), ("my_col_2", True)],
)
where = fields.String(
- description="WHERE clause to be added to queries using AND operator."
- "This field is deprecated and should be passed to `extras`.",
+ metadata={
+ "description": "WHERE clause to be added to queries using AND operator. "
+ "This field is deprecated and should be passed to `extras`.",
+ "deprecated": True,
+ },
allow_none=True,
- deprecated=True,
)
having = fields.String(
- description="HAVING clause to be added to aggregate queries using "
- "AND operator. This field is deprecated and should be passed "
- "to `extras`.",
+ metadata={
+ "description": "HAVING clause to be added to aggregate queries using "
+ "AND operator. This field is deprecated and should be passed "
+ "to `extras`.",
+ "deprecated": True,
+ },
allow_none=True,
- deprecated=True,
)
having_filters = fields.List(
fields.Nested(ChartDataFilterSchema),
- description="HAVING filters to be added to legacy Druid datasource queries. "
- "This field is deprecated and should be passed to `extras` "
- "as `having_druid`.",
+ metadata={
+ "description": "HAVING filters to be added to legacy Druid datasource "
+ "queries. This field is deprecated and should be passed to `extras` "
+ "as `having_druid`.",
+ "deprecated": True,
+ },
allow_none=True,
- deprecated=True,
)
druid_time_origin = fields.String(
- description="Starting point for time grain counting on legacy Druid "
- "datasources. Used to change e.g. Monday/Sunday first-day-of-week. "
- "This field is deprecated and should be passed to `extras` "
- "as `druid_time_origin`.",
+ metadata={
+ "description": "Starting point for time grain counting on legacy Druid "
+ "datasources. Used to change e.g. Monday/Sunday first-day-of-week. "
+ "This field is deprecated and should be passed to `extras` "
+ "as `druid_time_origin`.",
+ "deprecated": True,
+ },
allow_none=True,
- deprecated=True,
)
url_params = fields.Dict(
- description="Optional query parameters passed to a dashboard or Explore view",
- keys=fields.String(description="The query parameter"),
- values=fields.String(description="The value of the query parameter"),
+ metadata={
+ "description": "Optional query parameters passed to a dashboard or Explore "
+ "view"
+ },
+ keys=fields.String(metadata={"description": "The query parameter"}),
+ values=fields.String(
+ metadata={"description": "The value of the query parameter"}
+ ),
allow_none=True,
)
is_rowcount = fields.Boolean(
- description="Should the rowcount of the actual query be returned",
+ metadata={"description": "Should the rowcount of the actual query be returned"},
allow_none=True,
)
time_offsets = fields.List(
@@ -1202,14 +1385,16 @@ class ChartDataQueryContextSchema(Schema):
datasource = fields.Nested(ChartDataDatasourceSchema)
queries = fields.List(fields.Nested(ChartDataQueryObjectSchema))
custom_cache_timeout = fields.Integer(
- description="Override the default cache timeout",
+ metadata={"description": "Override the default cache timeout"},
required=False,
allow_none=True,
)
force = fields.Boolean(
- description="Should the queries be forced to load from the source. "
- "Default: `false`",
+ metadata={
+ "description": "Should the queries be forced to load from the source. "
+ "Default: `false`"
+ },
allow_none=True,
)
@@ -1236,14 +1421,14 @@ class ChartDataQueryContextSchema(Schema):
class AnnotationDataSchema(Schema):
columns = fields.List(
fields.String(),
- description="columns available in the annotation result",
+ metadata={"description": "columns available in the annotation result"},
required=True,
)
records = fields.List(
fields.Dict(
keys=fields.String(),
),
- description="records mapping the column name to it's value",
+ metadata={"description": "records mapping the column name to it's value"},
required=True,
)
@@ -1251,44 +1436,46 @@ class AnnotationDataSchema(Schema):
class ChartDataResponseResult(Schema):
annotation_data = fields.List(
fields.Dict(
- keys=fields.String(description="Annotation layer name"),
+ keys=fields.String(metadata={"description": "Annotation layer name"}),
values=fields.String(),
),
- description="All requested annotation data",
+ metadata={"description": "All requested annotation data"},
allow_none=True,
)
cache_key = fields.String(
- description="Unique cache key for query object",
+ metadata={"description": "Unique cache key for query object"},
required=True,
allow_none=True,
)
cached_dttm = fields.String(
- description="Cache timestamp",
+ metadata={"description": "Cache timestamp"},
required=True,
allow_none=True,
)
cache_timeout = fields.Integer(
- description="Cache timeout in following order: custom timeout, datasource "
- "timeout, cache default timeout, config default cache timeout.",
+ metadata={
+ "description": "Cache timeout in following order: custom timeout, datasource "
+ "timeout, cache default timeout, config default cache timeout."
+ },
required=True,
allow_none=True,
)
error = fields.String(
- description="Error",
+ metadata={"description": "Error"},
allow_none=True,
)
is_cached = fields.Boolean(
- description="Is the result cached",
+ metadata={"description": "Is the result cached"},
required=True,
allow_none=None,
)
query = fields.String(
- description="The executed query statement",
+ metadata={"description": "The executed query statement"},
required=True,
allow_none=False,
)
status = fields.String(
- description="Status of the query",
+ metadata={"description": "Status of the query"},
validate=validate.OneOf(
choices=(
"stopped",
@@ -1303,71 +1490,83 @@ class ChartDataResponseResult(Schema):
allow_none=False,
)
stacktrace = fields.String(
- description="Stacktrace if there was an error",
+ metadata={"description": "Stacktrace if there was an error"},
allow_none=True,
)
rowcount = fields.Integer(
- description="Amount of rows in result set",
+ metadata={"description": "Amount of rows in result set"},
allow_none=False,
)
- data = fields.List(fields.Dict(), description="A list with results")
- colnames = fields.List(fields.String(), description="A list of column names")
+ data = fields.List(fields.Dict(), metadata={"description": "A list with results"})
+ colnames = fields.List(
+ fields.String(), metadata={"description": "A list of column names"}
+ )
coltypes = fields.List(
- fields.Integer(), description="A list of generic data types of each column"
+ fields.Integer(),
+ metadata={"description": "A list of generic data types of each column"},
)
applied_filters = fields.List(
- fields.Dict(), description="A list with applied filters"
+ fields.Dict(), metadata={"description": "A list with applied filters"}
)
rejected_filters = fields.List(
- fields.Dict(), description="A list with rejected filters"
+ fields.Dict(), metadata={"description": "A list with rejected filters"}
)
from_dttm = fields.Integer(
- description="Start timestamp of time range", required=False, allow_none=True
+ metadata={"description": "Start timestamp of time range"},
+ required=False,
+ allow_none=True,
)
to_dttm = fields.Integer(
- description="End timestamp of time range", required=False, allow_none=True
+ metadata={"description": "End timestamp of time range"},
+ required=False,
+ allow_none=True,
)
class ChartDataResponseSchema(Schema):
result = fields.List(
fields.Nested(ChartDataResponseResult),
- description="A list of results for each corresponding query in the request.",
+ metadata={
+ "description": "A list of results for each corresponding query in the "
+ "request."
+ },
)
class ChartDataAsyncResponseSchema(Schema):
channel_id = fields.String(
- description="Unique session async channel ID",
+ metadata={"description": "Unique session async channel ID"},
allow_none=False,
)
job_id = fields.String(
- description="Unique async job ID",
+ metadata={"description": "Unique async job ID"},
allow_none=False,
)
user_id = fields.String(
- description="Requesting user ID",
+ metadata={"description": "Requesting user ID"},
allow_none=True,
)
status = fields.String(
- description="Status value for async job",
+ metadata={"description": "Status value for async job"},
allow_none=False,
)
result_url = fields.String(
- description="Unique result URL for fetching async query data",
+ metadata={"description": "Unique result URL for fetching async query data"},
allow_none=False,
)
class ChartFavStarResponseResult(Schema):
- id = fields.Integer(description="The Chart id")
- value = fields.Boolean(description="The FaveStar value")
+ id = fields.Integer(metadata={"description": "The Chart id"})
+ value = fields.Boolean(metadata={"description": "The FaveStar value"})
class GetFavStarIdsSchema(Schema):
result = fields.List(
fields.Nested(ChartFavStarResponseResult),
- description="A list of results for each corresponding chart in the request",
+ metadata={
+ "description": "A list of results for each corresponding chart in the request"
+ },
)
@@ -1383,7 +1582,7 @@ class ImportV1ChartSchema(Schema):
uuid = fields.UUID(required=True)
version = fields.String(required=True)
dataset_uuid = fields.UUID(required=True)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
diff --git a/superset/dashboards/permalink/schemas.py b/superset/dashboards/permalink/schemas.py
index ce222d7ed6..6fc7909cf0 100644
--- a/superset/dashboards/permalink/schemas.py
+++ b/superset/dashboards/permalink/schemas.py
@@ -21,30 +21,34 @@ class DashboardPermalinkPostSchema(Schema):
dataMask = fields.Dict(
required=False,
allow_none=True,
- description="Data mask used for native filter state",
+ metadata={"description": "Data mask used for native filter state"},
)
activeTabs = fields.List(
fields.String(),
required=False,
allow_none=True,
- description="Current active dashboard tabs",
+ metadata={"description": "Current active dashboard tabs"},
)
urlParams = fields.List(
fields.Tuple(
(
- fields.String(required=True, allow_none=True, description="Key"),
- fields.String(required=True, allow_none=True, description="Value"),
+ fields.String(
+ required=True, allow_none=True, metadata={"description": "Key"}
+ ),
+ fields.String(
+ required=True, allow_none=True, metadata={"description": "Value"}
+ ),
),
required=False,
allow_none=True,
- description="URL Parameter key-value pair",
+ metadata={"description": "URL Parameter key-value pair"},
),
required=False,
allow_none=True,
- description="URL Parameters",
+ metadata={"description": "URL Parameters"},
)
anchor = fields.String(
required=False,
allow_none=True,
- description="Optional anchor link added to url hash",
+ metadata={"description": "Optional anchor link added to url hash"},
)
diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py
index c1f435301c..156a3ac1f9 100644
--- a/superset/dashboards/schemas.py
+++ b/superset/dashboards/schemas.py
@@ -129,7 +129,7 @@ class DashboardJSONMetadataSchema(Schema):
label_colors = fields.Dict()
shared_label_colors = fields.Dict()
color_scheme_domain = fields.List(fields.Str())
- cross_filters_enabled = fields.Boolean(default=True)
+ cross_filters_enabled = fields.Boolean(dump_default=True)
# used for v0 import/export
import_time = fields.Integer()
remote_id = fields.Integer()
@@ -176,24 +176,28 @@ class DashboardGetResponseSchema(Schema):
id = fields.Int()
slug = fields.String()
url = fields.String()
- dashboard_title = fields.String(description=dashboard_title_description)
+ dashboard_title = fields.String(
+ metadata={"description": dashboard_title_description}
+ )
thumbnail_url = fields.String()
published = fields.Boolean()
- css = fields.String(description=css_description)
- json_metadata = fields.String(description=json_metadata_description)
- position_json = fields.String(description=position_json_description)
- certified_by = fields.String(description=certified_by_description)
- certification_details = fields.String(description=certification_details_description)
+ css = fields.String(metadata={"description": css_description})
+ json_metadata = fields.String(metadata={"description": json_metadata_description})
+ position_json = fields.String(metadata={"description": position_json_description})
+ certified_by = fields.String(metadata={"description": certified_by_description})
+ certification_details = fields.String(
+ metadata={"description": certification_details_description}
+ )
changed_by_name = fields.String()
changed_by_url = fields.String()
changed_by = fields.Nested(UserSchema)
changed_on = fields.DateTime()
- charts = fields.List(fields.String(description=charts_description))
+ charts = fields.List(fields.String(metadata={"description": charts_description}))
owners = fields.List(fields.Nested(UserSchema))
roles = fields.List(fields.Nested(RolesSchema))
tags = fields.Nested(TagSchema, many=True)
changed_on_humanized = fields.String(data_key="changed_on_delta_humanized")
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
class DatabaseSchema(Schema):
@@ -255,89 +259,107 @@ class BaseDashboardSchema(Schema):
class DashboardPostSchema(BaseDashboardSchema):
dashboard_title = fields.String(
- description=dashboard_title_description,
+ metadata={"description": dashboard_title_description},
allow_none=True,
validate=Length(0, 500),
)
slug = fields.String(
- description=slug_description, allow_none=True, validate=[Length(1, 255)]
+ metadata={"description": slug_description},
+ allow_none=True,
+ validate=[Length(1, 255)],
)
- owners = fields.List(fields.Integer(description=owners_description))
- roles = fields.List(fields.Integer(description=roles_description))
+ owners = fields.List(fields.Integer(metadata={"description": owners_description}))
+ roles = fields.List(fields.Integer(metadata={"description": roles_description}))
position_json = fields.String(
- description=position_json_description, validate=validate_json
+ metadata={"description": position_json_description}, validate=validate_json
)
- css = fields.String(description=css_description)
+ css = fields.String(metadata={"description": css_description})
json_metadata = fields.String(
- description=json_metadata_description,
+ metadata={"description": json_metadata_description},
validate=validate_json_metadata,
)
- published = fields.Boolean(description=published_description)
- certified_by = fields.String(description=certified_by_description, allow_none=True)
+ published = fields.Boolean(metadata={"description": published_description})
+ certified_by = fields.String(
+ metadata={"description": certified_by_description}, allow_none=True
+ )
certification_details = fields.String(
- description=certification_details_description, allow_none=True
+ metadata={"description": certification_details_description}, allow_none=True
)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
class DashboardCopySchema(Schema):
dashboard_title = fields.String(
- description=dashboard_title_description,
+ metadata={"description": dashboard_title_description},
allow_none=True,
validate=Length(0, 500),
)
- css = fields.String(description=css_description)
+ css = fields.String(metadata={"description": css_description})
json_metadata = fields.String(
- description=json_metadata_description,
+ metadata={"description": json_metadata_description},
validate=validate_json_metadata,
required=True,
)
duplicate_slices = fields.Boolean(
- description="Whether or not to also copy all charts on the dashboard"
+ metadata={
+ "description": "Whether or not to also copy all charts on the dashboard"
+ }
)
class DashboardPutSchema(BaseDashboardSchema):
dashboard_title = fields.String(
- description=dashboard_title_description,
+ metadata={"description": dashboard_title_description},
allow_none=True,
validate=Length(0, 500),
)
slug = fields.String(
- description=slug_description, allow_none=True, validate=Length(0, 255)
+ metadata={"description": slug_description},
+ allow_none=True,
+ validate=Length(0, 255),
)
owners = fields.List(
- fields.Integer(description=owners_description, allow_none=True)
+ fields.Integer(metadata={"description": owners_description}, allow_none=True)
+ )
+ roles = fields.List(
+ fields.Integer(metadata={"description": roles_description}, allow_none=True)
)
- roles = fields.List(fields.Integer(description=roles_description, allow_none=True))
position_json = fields.String(
- description=position_json_description, allow_none=True, validate=validate_json
+ metadata={"description": position_json_description},
+ allow_none=True,
+ validate=validate_json,
)
- css = fields.String(description=css_description, allow_none=True)
+ css = fields.String(metadata={"description": css_description}, allow_none=True)
json_metadata = fields.String(
- description=json_metadata_description,
+ metadata={"description": json_metadata_description},
allow_none=True,
validate=validate_json_metadata,
)
- published = fields.Boolean(description=published_description, allow_none=True)
- certified_by = fields.String(description=certified_by_description, allow_none=True)
+ published = fields.Boolean(
+ metadata={"description": published_description}, allow_none=True
+ )
+ certified_by = fields.String(
+ metadata={"description": certified_by_description}, allow_none=True
+ )
certification_details = fields.String(
- description=certification_details_description, allow_none=True
+ metadata={"description": certification_details_description}, allow_none=True
)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
class ChartFavStarResponseResult(Schema):
- id = fields.Integer(description="The Chart id")
- value = fields.Boolean(description="The FaveStar value")
+ id = fields.Integer(metadata={"description": "The Chart id"})
+ value = fields.Boolean(metadata={"description": "The FaveStar value"})
class GetFavStarIdsSchema(Schema):
result = fields.List(
fields.Nested(ChartFavStarResponseResult),
- description="A list of results for each corresponding chart in the request",
+ metadata={
+ "description": "A list of results for each corresponding chart in the request"
+ },
)
@@ -350,7 +372,7 @@ class ImportV1DashboardSchema(Schema):
position = fields.Dict()
metadata = fields.Dict()
version = fields.String(required=True)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py
index 7a1e99404f..e1e38667c0 100644
--- a/superset/databases/schemas.py
+++ b/superset/databases/schemas.py
@@ -243,18 +243,22 @@ class DatabaseParametersSchemaMixin: # pylint: disable=too-few-public-methods
When using this mixin make sure that `sqlalchemy_uri` is not required.
"""
- engine = fields.String(allow_none=True, description="SQLAlchemy engine to use")
- driver = fields.String(allow_none=True, description="SQLAlchemy driver to use")
+ engine = fields.String(
+ allow_none=True, metadata={"description": "SQLAlchemy engine to use"}
+ )
+ driver = fields.String(
+ allow_none=True, metadata={"description": "SQLAlchemy driver to use"}
+ )
parameters = fields.Dict(
keys=fields.String(),
values=fields.Raw(),
- description="DB-specific parameters for configuration",
+ metadata={"description": "DB-specific parameters for configuration"},
)
configuration_method = EnumField(
ConfigurationMethod,
by_value=True,
- description=configuration_method_description,
- missing=ConfigurationMethod.SQLALCHEMY_FORM,
+ metadata={"description": configuration_method_description},
+ load_default=ConfigurationMethod.SQLALCHEMY_FORM,
)
@pre_load
@@ -341,33 +345,45 @@ class DatabaseValidateParametersSchema(Schema):
rename_encrypted_extra = pre_load(rename_encrypted_extra)
- id = fields.Integer(allow_none=True, description="Database ID (for updates)")
- engine = fields.String(required=True, description="SQLAlchemy engine to use")
- driver = fields.String(allow_none=True, description="SQLAlchemy driver to use")
+ id = fields.Integer(
+ allow_none=True, metadata={"description": "Database ID (for updates)"}
+ )
+ engine = fields.String(
+ required=True, metadata={"description": "SQLAlchemy engine to use"}
+ )
+ driver = fields.String(
+ allow_none=True, metadata={"description": "SQLAlchemy driver to use"}
+ )
parameters = fields.Dict(
keys=fields.String(),
values=fields.Raw(allow_none=True),
- description="DB-specific parameters for configuration",
+ metadata={"description": "DB-specific parameters for configuration"},
)
catalog = fields.Dict(
keys=fields.String(),
values=fields.Raw(allow_none=True),
- description="Gsheets specific column for managing label to sheet urls",
+ metadata={
+ "description": "Gsheets specific column for managing label to sheet urls"
+ },
)
database_name = fields.String(
- description=database_name_description,
+ metadata={"description": database_name_description},
allow_none=True,
validate=Length(1, 250),
)
- impersonate_user = fields.Boolean(description=impersonate_user_description)
- extra = fields.String(description=extra_description, validate=extra_validator)
+ impersonate_user = fields.Boolean(
+ metadata={"description": impersonate_user_description}
+ )
+ extra = fields.String(
+ metadata={"description": extra_description}, validate=extra_validator
+ )
masked_encrypted_extra = fields.String(
- description=encrypted_extra_description,
+ metadata={"description": encrypted_extra_description},
validate=encrypted_extra_validator,
allow_none=True,
)
server_cert = fields.String(
- description=server_cert_description,
+ metadata={"description": server_cert_description},
allow_none=True,
validate=server_cert_validator,
)
@@ -375,12 +391,14 @@ class DatabaseValidateParametersSchema(Schema):
ConfigurationMethod,
by_value=True,
required=True,
- description=configuration_method_description,
+ metadata={"description": configuration_method_description},
)
class DatabaseSSHTunnel(Schema):
- id = fields.Integer(allow_none=True, description="SSH Tunnel ID (for updates)")
+ id = fields.Integer(
+ allow_none=True, metadata={"description": "SSH Tunnel ID (for updates)"}
+ )
server_address = fields.String()
server_port = fields.Integer()
username = fields.String()
@@ -400,41 +418,51 @@ class DatabasePostSchema(Schema, DatabaseParametersSchemaMixin):
rename_encrypted_extra = pre_load(rename_encrypted_extra)
database_name = fields.String(
- description=database_name_description,
+ metadata={"description": database_name_description},
required=True,
validate=Length(1, 250),
)
cache_timeout = fields.Integer(
- description=cache_timeout_description, allow_none=True
- )
- expose_in_sqllab = fields.Boolean(description=expose_in_sqllab_description)
- allow_run_async = fields.Boolean(description=allow_run_async_description)
- allow_file_upload = fields.Boolean(description=allow_file_upload_description)
- allow_ctas = fields.Boolean(description=allow_ctas_description)
- allow_cvas = fields.Boolean(description=allow_cvas_description)
- allow_dml = fields.Boolean(description=allow_dml_description)
+ metadata={"description": cache_timeout_description}, allow_none=True
+ )
+ expose_in_sqllab = fields.Boolean(
+ metadata={"description": expose_in_sqllab_description}
+ )
+ allow_run_async = fields.Boolean(
+ metadata={"description": allow_run_async_description}
+ )
+ allow_file_upload = fields.Boolean(
+ metadata={"description": allow_file_upload_description}
+ )
+ allow_ctas = fields.Boolean(metadata={"description": allow_ctas_description})
+ allow_cvas = fields.Boolean(metadata={"description": allow_cvas_description})
+ allow_dml = fields.Boolean(metadata={"description": allow_dml_description})
force_ctas_schema = fields.String(
- description=force_ctas_schema_description,
+ metadata={"description": force_ctas_schema_description},
allow_none=True,
validate=Length(0, 250),
)
- impersonate_user = fields.Boolean(description=impersonate_user_description)
+ impersonate_user = fields.Boolean(
+ metadata={"description": impersonate_user_description}
+ )
masked_encrypted_extra = fields.String(
- description=encrypted_extra_description,
+ metadata={"description": encrypted_extra_description},
validate=encrypted_extra_validator,
allow_none=True,
)
- extra = fields.String(description=extra_description, validate=extra_validator)
+ extra = fields.String(
+ metadata={"description": extra_description}, validate=extra_validator
+ )
server_cert = fields.String(
- description=server_cert_description,
+ metadata={"description": server_cert_description},
allow_none=True,
validate=server_cert_validator,
)
sqlalchemy_uri = fields.String(
- description=sqlalchemy_uri_description,
+ metadata={"description": sqlalchemy_uri_description},
validate=[Length(1, 1024), sqlalchemy_uri_validator],
)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
uuid = fields.String(required=False)
ssh_tunnel = fields.Nested(DatabaseSSHTunnel, allow_none=True)
@@ -447,41 +475,51 @@ class DatabasePutSchema(Schema, DatabaseParametersSchemaMixin):
rename_encrypted_extra = pre_load(rename_encrypted_extra)
database_name = fields.String(
- description=database_name_description,
+ metadata={"description": database_name_description},
allow_none=True,
validate=Length(1, 250),
)
cache_timeout = fields.Integer(
- description=cache_timeout_description, allow_none=True
- )
- expose_in_sqllab = fields.Boolean(description=expose_in_sqllab_description)
- allow_run_async = fields.Boolean(description=allow_run_async_description)
- allow_file_upload = fields.Boolean(description=allow_file_upload_description)
- allow_ctas = fields.Boolean(description=allow_ctas_description)
- allow_cvas = fields.Boolean(description=allow_cvas_description)
- allow_dml = fields.Boolean(description=allow_dml_description)
+ metadata={"description": cache_timeout_description}, allow_none=True
+ )
+ expose_in_sqllab = fields.Boolean(
+ metadata={"description": expose_in_sqllab_description}
+ )
+ allow_run_async = fields.Boolean(
+ metadata={"description": allow_run_async_description}
+ )
+ allow_file_upload = fields.Boolean(
+ metadata={"description": allow_file_upload_description}
+ )
+ allow_ctas = fields.Boolean(metadata={"description": allow_ctas_description})
+ allow_cvas = fields.Boolean(metadata={"description": allow_cvas_description})
+ allow_dml = fields.Boolean(metadata={"description": allow_dml_description})
force_ctas_schema = fields.String(
- description=force_ctas_schema_description,
+ metadata={"description": force_ctas_schema_description},
allow_none=True,
validate=Length(0, 250),
)
- impersonate_user = fields.Boolean(description=impersonate_user_description)
+ impersonate_user = fields.Boolean(
+ metadata={"description": impersonate_user_description}
+ )
masked_encrypted_extra = fields.String(
- description=encrypted_extra_description,
+ metadata={"description": encrypted_extra_description},
allow_none=True,
validate=encrypted_extra_validator,
)
- extra = fields.String(description=extra_description, validate=extra_validator)
+ extra = fields.String(
+ metadata={"description": extra_description}, validate=extra_validator
+ )
server_cert = fields.String(
- description=server_cert_description,
+ metadata={"description": server_cert_description},
allow_none=True,
validate=server_cert_validator,
)
sqlalchemy_uri = fields.String(
- description=sqlalchemy_uri_description,
+ metadata={"description": sqlalchemy_uri_description},
validate=[Length(0, 1024), sqlalchemy_uri_validator],
)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
ssh_tunnel = fields.Nested(DatabaseSSHTunnel, allow_none=True)
uuid = fields.String(required=False)
@@ -491,24 +529,28 @@ class DatabaseTestConnectionSchema(Schema, DatabaseParametersSchemaMixin):
rename_encrypted_extra = pre_load(rename_encrypted_extra)
database_name = fields.String(
- description=database_name_description,
+ metadata={"description": database_name_description},
allow_none=True,
validate=Length(1, 250),
)
- impersonate_user = fields.Boolean(description=impersonate_user_description)
- extra = fields.String(description=extra_description, validate=extra_validator)
+ impersonate_user = fields.Boolean(
+ metadata={"description": impersonate_user_description}
+ )
+ extra = fields.String(
+ metadata={"description": extra_description}, validate=extra_validator
+ )
masked_encrypted_extra = fields.String(
- description=encrypted_extra_description,
+ metadata={"description": encrypted_extra_description},
validate=encrypted_extra_validator,
allow_none=True,
)
server_cert = fields.String(
- description=server_cert_description,
+ metadata={"description": server_cert_description},
allow_none=True,
validate=server_cert_validator,
)
sqlalchemy_uri = fields.String(
- description=sqlalchemy_uri_description,
+ metadata={"description": sqlalchemy_uri_description},
validate=[Length(1, 1024), sqlalchemy_uri_validator],
)
@@ -524,20 +566,27 @@ class TableMetadataOptionsResponseSchema(Schema):
class TableMetadataColumnsResponseSchema(Schema):
- keys = fields.List(fields.String(), description="")
- longType = fields.String(description="The actual backend long type for the column")
- name = fields.String(description="The column name")
- type = fields.String(description="The column type")
+ keys = fields.List(fields.String(), metadata={"description": ""})
+ longType = fields.String(
+ metadata={"description": "The actual backend long type for the column"}
+ )
+ name = fields.String(metadata={"description": "The column name"})
+ type = fields.String(metadata={"description": "The column type"})
duplicates_constraint = fields.String(required=False)
class TableMetadataForeignKeysIndexesResponseSchema(Schema):
column_names = fields.List(
fields.String(
- description="A list of column names that compose the foreign key or index"
+ metadata={
+ "description": "A list of column names that compose the foreign key or "
+ " index"
+ }
)
)
- name = fields.String(description="The name of the foreign key or index")
+ name = fields.String(
+ metadata={"description": "The name of the foreign key or index"}
+ )
options = fields.Nested(TableMetadataOptionsResponseSchema)
referred_columns = fields.List(fields.String())
referred_schema = fields.String()
@@ -547,30 +596,35 @@ class TableMetadataForeignKeysIndexesResponseSchema(Schema):
class TableMetadataPrimaryKeyResponseSchema(Schema):
column_names = fields.List(
- fields.String(description="A list of column names that compose the primary key")
+ fields.String(
+ metadata={
+ "description": "A list of column names that compose the primary key"
+ }
+ )
)
- name = fields.String(description="The primary key index name")
+ name = fields.String(metadata={"description": "The primary key index name"})
type = fields.String()
class TableMetadataResponseSchema(Schema):
- name = fields.String(description="The name of the table")
+ name = fields.String(metadata={"description": "The name of the table"})
columns = fields.List(
fields.Nested(TableMetadataColumnsResponseSchema),
- description="A list of columns and their metadata",
+ metadata={"description": "A list of columns and their metadata"},
)
foreignKeys = fields.List(
fields.Nested(TableMetadataForeignKeysIndexesResponseSchema),
- description="A list of foreign keys and their metadata",
+ metadata={"description": "A list of foreign keys and their metadata"},
)
indexes = fields.List(
fields.Nested(TableMetadataForeignKeysIndexesResponseSchema),
- description="A list of indexes and their metadata",
+ metadata={"description": "A list of indexes and their metadata"},
)
primaryKey = fields.Nested(
- TableMetadataPrimaryKeyResponseSchema, description="Primary keys metadata"
+ TableMetadataPrimaryKeyResponseSchema,
+ metadata={"description": "Primary keys metadata"},
)
- selectStar = fields.String(description="SQL select star")
+ selectStar = fields.String(metadata={"description": "SQL select star"})
class TableExtraMetadataResponseSchema(Schema):
@@ -580,21 +634,27 @@ class TableExtraMetadataResponseSchema(Schema):
class SelectStarResponseSchema(Schema):
- result = fields.String(description="SQL select star")
+ result = fields.String(metadata={"description": "SQL select star"})
class SchemasResponseSchema(Schema):
- result = fields.List(fields.String(description="A database schema name"))
+ result = fields.List(
+ fields.String(metadata={"description": "A database schema name"})
+ )
class DatabaseTablesResponse(Schema):
- extra = fields.Dict(description="Extra data used to specify column metadata")
- type = fields.String(description="table or view")
- value = fields.String(description="The table or view name")
+ extra = fields.Dict(
+ metadata={"description": "Extra data used to specify column metadata"}
+ )
+ type = fields.String(metadata={"description": "table or view"})
+ value = fields.String(metadata={"description": "The table or view name"})
class ValidateSQLRequest(Schema):
- sql = fields.String(required=True, description="SQL statement to validate")
+ sql = fields.String(
+ required=True, metadata={"description": "SQL statement to validate"}
+ )
schema = fields.String(required=False, allow_none=True)
template_params = fields.Dict(required=False, allow_none=True)
@@ -620,16 +680,18 @@ class DatabaseRelatedDashboard(Schema):
class DatabaseRelatedCharts(Schema):
- count = fields.Integer(description="Chart count")
+ count = fields.Integer(metadata={"description": "Chart count"})
result = fields.List(
- fields.Nested(DatabaseRelatedChart), description="A list of dashboards"
+ fields.Nested(DatabaseRelatedChart),
+ metadata={"description": "A list of dashboards"},
)
class DatabaseRelatedDashboards(Schema):
- count = fields.Integer(description="Dashboard count")
+ count = fields.Integer(metadata={"description": "Dashboard count"})
result = fields.List(
- fields.Nested(DatabaseRelatedDashboard), description="A list of dashboards"
+ fields.Nested(DatabaseRelatedDashboard),
+ metadata={"description": "A list of dashboards"},
)
@@ -708,7 +770,7 @@ class ImportV1DatabaseSchema(Schema):
extra = fields.Nested(ImportV1DatabaseExtraSchema)
uuid = fields.UUID(required=True)
version = fields.String(required=True)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
ssh_tunnel = fields.Nested(DatabaseSSHTunnel, allow_none=True)
@@ -813,5 +875,8 @@ def encrypted_field_properties(self, field: Any, **_) -> Dict[str, Any]: # type
class DatabaseSchemaAccessForFileUploadResponse(Schema):
schemas = fields.List(
fields.String(),
- description="The list of schemas allowed for the database to upload information",
+ metadata={
+ "description": "The list of schemas allowed for the database to upload "
+ "information"
+ },
)
diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py
index 1d49bc1cfb..eaf5963fdf 100644
--- a/superset/datasets/schemas.py
+++ b/superset/datasets/schemas.py
@@ -49,14 +49,14 @@ class DatasetColumnsPutSchema(Schema):
column_name = fields.String(required=True, validate=Length(1, 255))
type = fields.String(allow_none=True)
advanced_data_type = fields.String(allow_none=True, validate=Length(1, 255))
- verbose_name = fields.String(allow_none=True, Length=(1, 1024))
+ verbose_name = fields.String(allow_none=True, metadata={Length: (1, 1024)})
description = fields.String(allow_none=True)
expression = fields.String(allow_none=True)
extra = fields.String(allow_none=True)
filterable = fields.Boolean()
groupby = fields.Boolean()
is_active = fields.Boolean(allow_none=True)
- is_dttm = fields.Boolean(default=False)
+ is_dttm = fields.Boolean(dump_default=False)
python_date_format = fields.String(
allow_none=True, validate=[Length(1, 255), validate_python_date_format]
)
@@ -71,7 +71,7 @@ class DatasetMetricsPutSchema(Schema):
metric_name = fields.String(required=True, validate=Length(1, 255))
metric_type = fields.String(allow_none=True, validate=Length(1, 32))
d3format = fields.String(allow_none=True, validate=Length(1, 128))
- verbose_name = fields.String(allow_none=True, Length=(1, 1024))
+ verbose_name = fields.String(allow_none=True, metadata={Length: (1, 1024)})
warning_text = fields.String(allow_none=True)
uuid = fields.UUID(allow_none=True)
@@ -82,7 +82,7 @@ class DatasetPostSchema(Schema):
table_name = fields.String(required=True, allow_none=False, validate=Length(1, 250))
sql = fields.String(allow_none=True)
owners = fields.List(fields.Integer())
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
@@ -104,7 +104,7 @@ class DatasetPutSchema(Schema):
columns = fields.List(fields.Nested(DatasetColumnsPutSchema))
metrics = fields.List(fields.Nested(DatasetMetricsPutSchema))
extra = fields.String(allow_none=True)
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
@@ -127,16 +127,18 @@ class DatasetRelatedDashboard(Schema):
class DatasetRelatedCharts(Schema):
- count = fields.Integer(description="Chart count")
+ count = fields.Integer(metadata={"description": "Chart count"})
result = fields.List(
- fields.Nested(DatasetRelatedChart), description="A list of dashboards"
+ fields.Nested(DatasetRelatedChart),
+ metadata={"description": "A list of dashboards"},
)
class DatasetRelatedDashboards(Schema):
- count = fields.Integer(description="Dashboard count")
+ count = fields.Integer(metadata={"description": "Dashboard count"})
result = fields.List(
- fields.Nested(DatasetRelatedDashboard), description="A list of dashboards"
+ fields.Nested(DatasetRelatedDashboard),
+ metadata={"description": "A list of dashboards"},
)
@@ -160,8 +162,8 @@ class ImportV1ColumnSchema(Schema):
column_name = fields.String(required=True)
extra = fields.Dict(allow_none=True)
verbose_name = fields.String(allow_none=True)
- is_dttm = fields.Boolean(default=False, allow_none=True)
- is_active = fields.Boolean(default=True, allow_none=True)
+ is_dttm = fields.Boolean(dump_default=False, allow_none=True)
+ is_active = fields.Boolean(dump_default=True, allow_none=True)
type = fields.String(allow_none=True)
advanced_data_type = fields.String(allow_none=True)
groupby = fields.Boolean()
@@ -224,19 +226,21 @@ class ImportV1DatasetSchema(Schema):
version = fields.String(required=True)
database_uuid = fields.UUID(required=True)
data = fields.URL()
- is_managed_externally = fields.Boolean(allow_none=True, default=False)
+ is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
external_url = fields.String(allow_none=True)
class GetOrCreateDatasetSchema(Schema):
- table_name = fields.String(required=True, description="Name of table")
+ table_name = fields.String(required=True, metadata={"description": "Name of table"})
database_id = fields.Integer(
- required=True, description="ID of database table belongs to"
+ required=True, metadata={"description": "ID of database table belongs to"}
)
schema = fields.String(
- description="The schema the table belongs to", allow_none=True
+ metadata={"description": "The schema the table belongs to"}, allow_none=True
+ )
+ template_params = fields.String(
+ metadata={"description": "Template params for the table"}
)
- template_params = fields.String(description="Template params for the table")
class DatasetSchema(SQLAlchemyAutoSchema):
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 93df7c7216..aac971dea0 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -1859,20 +1859,29 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
# schema for adding a database by providing parameters instead of the
# full SQLAlchemy URI
class BasicParametersSchema(Schema):
- username = fields.String(required=True, allow_none=True, description=__("Username"))
- password = fields.String(allow_none=True, description=__("Password"))
- host = fields.String(required=True, description=__("Hostname or IP address"))
+ username = fields.String(
+ required=True, allow_none=True, metadata={"description": __("Username")}
+ )
+ password = fields.String(allow_none=True, metadata={"description": __("Password")})
+ host = fields.String(
+ required=True, metadata={"description": __("Hostname or IP address")}
+ )
port = fields.Integer(
required=True,
- description=__("Database port"),
+ metadata={"description": __("Database port")},
validate=Range(min=0, max=2**16, max_inclusive=False),
)
- database = fields.String(required=True, description=__("Database name"))
+ database = fields.String(
+ required=True, metadata={"description": __("Database name")}
+ )
query = fields.Dict(
- keys=fields.Str(), values=fields.Raw(), description=__("Additional parameters")
+ keys=fields.Str(),
+ values=fields.Raw(),
+ metadata={"description": __("Additional parameters")},
)
encryption = fields.Boolean(
- required=False, description=__("Use an encrypted connection to the database")
+ required=False,
+ metadata={"description": __("Use an encrypted connection to the database")},
)
diff --git a/superset/db_engine_specs/bigquery.py b/superset/db_engine_specs/bigquery.py
index 976b2ee316..1f2ee51068 100644
--- a/superset/db_engine_specs/bigquery.py
+++ b/superset/db_engine_specs/bigquery.py
@@ -93,7 +93,7 @@ ma_plugin = MarshmallowPlugin()
class BigQueryParametersSchema(Schema):
credentials_info = EncryptedString(
required=False,
- description="Contents of BigQuery JSON credentials.",
+ metadata={"description": "Contents of BigQuery JSON credentials."},
)
query = fields.Dict(required=False)
diff --git a/superset/db_engine_specs/clickhouse.py b/superset/db_engine_specs/clickhouse.py
index 8de1cef3bc..4c229ca116 100644
--- a/superset/db_engine_specs/clickhouse.py
+++ b/superset/db_engine_specs/clickhouse.py
@@ -198,20 +198,27 @@ class ClickHouseEngineSpec(ClickHouseBaseEngineSpec):
class ClickHouseParametersSchema(Schema):
- username = fields.String(allow_none=True, description=__("Username"))
- password = fields.String(allow_none=True, description=__("Password"))
- host = fields.String(required=True, description=__("Hostname or IP address"))
+ username = fields.String(allow_none=True, metadata={"description": __("Username")})
+ password = fields.String(allow_none=True, metadata={"description": __("Password")})
+ host = fields.String(
+ required=True, metadata={"description": __("Hostname or IP address")}
+ )
port = fields.Integer(
allow_none=True,
- description=__("Database port"),
+ metadata={"description": __("Database port")},
validate=Range(min=0, max=65535),
)
- database = fields.String(allow_none=True, description=__("Database name"))
+ database = fields.String(
+ allow_none=True, metadata={"description": __("Database name")}
+ )
encryption = fields.Boolean(
- default=True, description=__("Use an encrypted connection to the database")
+ dump_default=True,
+ metadata={"description": __("Use an encrypted connection to the database")},
)
query = fields.Dict(
- keys=fields.Str(), values=fields.Raw(), description=__("Additional parameters")
+ keys=fields.Str(),
+ values=fields.Raw(),
+ metadata={"description": __("Additional parameters")},
)
diff --git a/superset/db_engine_specs/databricks.py b/superset/db_engine_specs/databricks.py
index a3650523b2..d9ad65e490 100644
--- a/superset/db_engine_specs/databricks.py
+++ b/superset/db_engine_specs/databricks.py
@@ -49,12 +49,13 @@ class DatabricksParametersSchema(Schema):
host = fields.Str(required=True)
port = fields.Integer(
required=True,
- description=__("Database port"),
+ metadata={"description": __("Database port")},
validate=Range(min=0, max=2**16, max_inclusive=False),
)
database = fields.Str(required=True)
encryption = fields.Boolean(
- required=False, description=__("Use an encrypted connection to the database")
+ required=False,
+ metadata={"description": __("Use an encrypted connection to the database")},
)
diff --git a/superset/db_engine_specs/gsheets.py b/superset/db_engine_specs/gsheets.py
index 17156f5a9e..73a66c464f 100644
--- a/superset/db_engine_specs/gsheets.py
+++ b/superset/db_engine_specs/gsheets.py
@@ -47,8 +47,10 @@ class GSheetsParametersSchema(Schema):
catalog = fields.Dict()
service_account_info = EncryptedString(
required=False,
- description="Contents of GSheets JSON credentials.",
- field_name="service_account_info",
+ metadata={
+ "description": "Contents of GSheets JSON credentials.",
+ "field_name": "service_account_info",
+ },
)
diff --git a/superset/explore/form_data/schemas.py b/superset/explore/form_data/schemas.py
index 192df089e8..d4f72d45d6 100644
--- a/superset/explore/form_data/schemas.py
+++ b/superset/explore/form_data/schemas.py
@@ -21,31 +21,35 @@ from superset.utils.core import DatasourceType
class FormDataPostSchema(Schema):
datasource_id = fields.Integer(
- required=True, allow_none=False, description="The datasource ID"
+ required=True, allow_none=False, metadata={"description": "The datasource ID"}
)
datasource_type = fields.String(
required=True,
allow_none=False,
- description="The datasource type",
+ metadata={"description": "The datasource type"},
validate=validate.OneOf(choices=[ds.value for ds in DatasourceType]),
)
- chart_id = fields.Integer(required=False, description="The chart ID")
+ chart_id = fields.Integer(required=False, metadata={"description": "The chart ID"})
form_data = fields.String(
- required=True, allow_none=False, description="Any type of JSON supported text."
+ required=True,
+ allow_none=False,
+ metadata={"description": "Any type of JSON supported text."},
)
class FormDataPutSchema(Schema):
datasource_id = fields.Integer(
- required=True, allow_none=False, description="The datasource ID"
+ required=True, allow_none=False, metadata={"description": "The datasource ID"}
)
datasource_type = fields.String(
required=True,
allow_none=False,
- description="The datasource type",
+ metadata={"description": "The datasource type"},
validate=validate.OneOf(choices=[ds.value for ds in DatasourceType]),
)
- chart_id = fields.Integer(required=False, description="The chart ID")
+ chart_id = fields.Integer(required=False, metadata={"description": "The chart ID"})
form_data = fields.String(
- required=True, allow_none=False, description="Any type of JSON supported text."
+ required=True,
+ allow_none=False,
+ metadata={"description": "Any type of JSON supported text."},
)
diff --git a/superset/explore/permalink/schemas.py b/superset/explore/permalink/schemas.py
index e1f9d069b8..b56715e903 100644
--- a/superset/explore/permalink/schemas.py
+++ b/superset/explore/permalink/schemas.py
@@ -21,19 +21,23 @@ class ExplorePermalinkPostSchema(Schema):
formData = fields.Dict(
required=True,
allow_none=False,
- description="Chart form data",
+ metadata={"description": "Chart form data"},
)
urlParams = fields.List(
fields.Tuple(
(
- fields.String(required=True, allow_none=True, description="Key"),
- fields.String(required=True, allow_none=True, description="Value"),
+ fields.String(
+ required=True, allow_none=True, metadata={"description": "Key"}
+ ),
+ fields.String(
+ required=True, allow_none=True, metadata={"description": "Value"}
+ ),
),
required=False,
allow_none=True,
- description="URL Parameter key-value pair",
+ metadata={"description": "URL Parameter key-value pair"},
),
required=False,
allow_none=True,
- description="URL Parameters",
+ metadata={"description": "URL Parameters"},
)
diff --git a/superset/explore/schemas.py b/superset/explore/schemas.py
index 457c99422a..f0060360cf 100644
--- a/superset/explore/schemas.py
+++ b/superset/explore/schemas.py
@@ -19,96 +19,146 @@ from marshmallow import fields, Schema
class DatasetSchema(Schema):
cache_timeout = fields.Integer(
- description="Duration (in seconds) of the caching timeout for this dataset."
- )
- column_formats = fields.Dict(description="Column formats.")
- columns = fields.List(fields.Dict(), description="Columns metadata.")
- database = fields.Dict(description="Database associated with the dataset.")
- datasource_name = fields.String(description="Dataset name.")
- default_endpoint = fields.String(description="Default endpoint for the dataset.")
- description = fields.String(description="Dataset description.")
- edit_url = fields.String(description="The URL for editing the dataset.")
+ metadata={
+ "description": "Duration (in seconds) of the caching timeout for this "
+ "dataset."
+ }
+ )
+ column_formats = fields.Dict(metadata={"description": "Column formats."})
+ columns = fields.List(fields.Dict(), metadata={"description": "Columns metadata."})
+ database = fields.Dict(
+ metadata={"description": "Database associated with the dataset."}
+ )
+ datasource_name = fields.String(metadata={"description": "Dataset name."})
+ default_endpoint = fields.String(
+ metadata={"description": "Default endpoint for the dataset."}
+ )
+ description = fields.String(metadata={"description": "Dataset description."})
+ edit_url = fields.String(
+ metadata={"description": "The URL for editing the dataset."}
+ )
extra = fields.Dict(
- description="JSON string containing extra configuration elements."
+ metadata={"description": "JSON string containing extra configuration elements."}
)
fetch_values_predicate = fields.String(
- description="Predicate used when fetching values from the dataset."
+ metadata={
+ "description": "Predicate used when fetching values from the dataset."
+ }
+ )
+ filter_select = fields.Bool(
+ metadata={"description": "SELECT filter applied to the dataset."}
+ )
+ filter_select_enabled = fields.Bool(
+ metadata={"description": "If the SELECT filter is enabled."}
)
- filter_select = fields.Bool(description="SELECT filter applied to the dataset.")
- filter_select_enabled = fields.Bool(description="If the SELECT filter is enabled.")
granularity_sqla = fields.List(
fields.List(fields.Dict()),
- description=(
- "Name of temporal column used for time filtering for SQL datasources. "
- "This field is deprecated, use `granularity` instead."
- ),
- )
- health_check_message = fields.String(description="Health check message.")
- id = fields.Integer(description="Dataset ID.")
- is_sqllab_view = fields.Bool(description="If the dataset is a SQL Lab view.")
- main_dttm_col = fields.String(description="The main temporal column.")
- metrics = fields.List(fields.Dict(), description="Dataset metrics.")
- name = fields.String(description="Dataset name.")
- offset = fields.Integer(description="Dataset offset.")
+ metadata={
+ "description": (
+ "Name of temporal column used for time filtering for SQL datasources. "
+ "This field is deprecated, use `granularity` instead."
+ )
+ },
+ )
+ health_check_message = fields.String(
+ metadata={"description": "Health check message."}
+ )
+ id = fields.Integer(metadata={"description": "Dataset ID."})
+ is_sqllab_view = fields.Bool(
+ metadata={"description": "If the dataset is a SQL Lab view."}
+ )
+ main_dttm_col = fields.String(metadata={"description": "The main temporal column."})
+ metrics = fields.List(fields.Dict(), metadata={"description": "Dataset metrics."})
+ name = fields.String(metadata={"description": "Dataset name."})
+ offset = fields.Integer(metadata={"description": "Dataset offset."})
order_by_choices = fields.List(
- fields.List(fields.String()), description="List of order by columns."
- )
- owners = fields.List(fields.Integer(), description="List of owners identifiers")
- params = fields.Dict(description="Extra params for the dataset.")
- perm = fields.String(description="Permission expression.")
- schema = fields.String(description="Dataset schema.")
- select_star = fields.String(description="Select all clause.")
- sql = fields.String(description="A SQL statement that defines the dataset.")
+ fields.List(fields.String()),
+ metadata={"description": "List of order by columns."},
+ )
+ owners = fields.List(
+ fields.Integer(), metadata={"description": "List of owners identifiers"}
+ )
+ params = fields.Dict(metadata={"description": "Extra params for the dataset."})
+ perm = fields.String(metadata={"description": "Permission expression."})
+ schema = fields.String(metadata={"description": "Dataset schema."})
+ select_star = fields.String(metadata={"description": "Select all clause."})
+ sql = fields.String(
+ metadata={"description": "A SQL statement that defines the dataset."}
+ )
table_name = fields.String(
- description="The name of the table associated with the dataset."
+ metadata={"description": "The name of the table associated with the dataset."}
)
- template_params = fields.Dict(description="Table template params.")
+ template_params = fields.Dict(metadata={"description": "Table template params."})
time_grain_sqla = fields.List(
fields.List(fields.String()),
- description="List of temporal granularities supported by the dataset.",
+ metadata={
+ "description": "List of temporal granularities supported by the dataset."
+ },
+ )
+ type = fields.String(metadata={"description": "Dataset type."})
+ uid = fields.String(metadata={"description": "Dataset unique identifier."})
+ verbose_map = fields.Dict(
+ metadata={"description": "Mapping from raw name to verbose name."}
)
- type = fields.String(description="Dataset type.")
- uid = fields.String(description="Dataset unique identifier.")
- verbose_map = fields.Dict(description="Mapping from raw name to verbose name.")
class SliceSchema(Schema):
cache_timeout = fields.Integer(
- description="Duration (in seconds) of the caching timeout for this chart."
+ metadata={
+ "description": "Duration (in seconds) of the caching timeout for this chart."
+ }
+ )
+ certification_details = fields.String(
+ metadata={"description": "Details of the certification."}
)
- certification_details = fields.String(description="Details of the certification.")
certified_by = fields.String(
- description="Person or group that has certified this dashboard."
+ metadata={"description": "Person or group that has certified this dashboard."}
+ )
+ changed_on = fields.String(
+ metadata={"description": "Timestamp of the last modification."}
)
- changed_on = fields.String(description="Timestamp of the last modification.")
changed_on_humanized = fields.String(
- description="Timestamp of the last modification in human readable form."
+ metadata={
+ "description": "Timestamp of the last modification in human readable form."
+ }
)
- datasource = fields.String(description="Datasource identifier.")
- description = fields.String(description="Slice description.")
+ datasource = fields.String(metadata={"description": "Datasource identifier."})
+ description = fields.String(metadata={"description": "Slice description."})
description_markeddown = fields.String(
- description="Sanitized HTML version of the chart description."
+ metadata={"description": "Sanitized HTML version of the chart description."}
+ )
+ edit_url = fields.String(metadata={"description": "The URL for editing the slice."})
+ form_data = fields.Dict(
+ metadata={"description": "Form data associated with the slice."}
)
- edit_url = fields.String(description="The URL for editing the slice.")
- form_data = fields.Dict(description="Form data associated with the slice.")
is_managed_externally = fields.Bool(
- description="If the chart is managed outside externally."
- )
- modified = fields.String(description="Last modification in human readable form.")
- owners = fields.List(fields.Integer(), description="Owners identifiers.")
- query_context = fields.Dict(description="The context associated with the query.")
- slice_id = fields.Integer(description="The slice ID.")
- slice_name = fields.String(description="The slice name.")
- slice_url = fields.String(description="The slice URL.")
+ metadata={"description": "If the chart is managed outside externally."}
+ )
+ modified = fields.String(
+ metadata={"description": "Last modification in human readable form."}
+ )
+ owners = fields.List(
+ fields.Integer(), metadata={"description": "Owners identifiers."}
+ )
+ query_context = fields.Dict(
+ metadata={"description": "The context associated with the query."}
+ )
+ slice_id = fields.Integer(metadata={"description": "The slice ID."})
+ slice_name = fields.String(metadata={"description": "The slice name."})
+ slice_url = fields.String(metadata={"description": "The slice URL."})
class ExploreContextSchema(Schema):
form_data = fields.Dict(
- description=(
- "Form data from the Explore controls used to form the "
- "chart's data query."
- )
+ metadata={
+ "description": (
+ "Form data from the Explore controls used to form the "
+ "chart's data query."
+ )
+ }
)
dataset = fields.Nested(DatasetSchema)
slice = fields.Nested(SliceSchema)
- message = fields.String(description="Any message related to the processed request.")
+ message = fields.String(
+ metadata={"description": "Any message related to the processed request."}
+ )
diff --git a/superset/reports/schemas.py b/superset/reports/schemas.py
index c64593f7f9..a45ee4cc38 100644
--- a/superset/reports/schemas.py
+++ b/superset/reports/schemas.py
@@ -100,7 +100,7 @@ def validate_crontab(value: Union[bytes, bytearray, str]) -> None:
class ValidatorConfigJSONSchema(Schema):
op = fields.String( # pylint: disable=invalid-name
- description=validator_config_json_op_description,
+ metadata={"description": validator_config_json_op_description},
validate=validate.OneOf(choices=["<", "<=", ">", ">=", "==", "!="]),
)
threshold = fields.Float()
@@ -113,7 +113,7 @@ class ReportRecipientConfigJSONSchema(Schema):
class ReportRecipientSchema(Schema):
type = fields.String(
- description="The recipient type, check spec for valid options",
+ metadata={"description": "The recipient type, check spec for valid options"},
allow_none=False,
required=True,
validate=validate.OneOf(
@@ -125,88 +125,90 @@ class ReportRecipientSchema(Schema):
class ReportSchedulePostSchema(Schema):
type = fields.String(
- description=type_description,
+ metadata={"description": type_description},
allow_none=False,
required=True,
validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)),
)
name = fields.String(
- description=name_description,
+ metadata={"description": name_description, "example": "Daily dashboard email"},
allow_none=False,
required=True,
validate=[Length(1, 150)],
- example="Daily dashboard email",
)
description = fields.String(
- description=description_description,
+ metadata={
+ "description": description_description,
+ "example": "Daily sales dashboard to marketing",
+ },
allow_none=True,
required=False,
- example="Daily sales dashboard to marketing",
)
context_markdown = fields.String(
- description=context_markdown_description, allow_none=True, required=False
+ metadata={"description": context_markdown_description},
+ allow_none=True,
+ required=False,
)
active = fields.Boolean()
crontab = fields.String(
- description=crontab_description,
+ metadata={"description": crontab_description, "example": "*/5 * * * *"},
validate=[validate_crontab, Length(1, 1000)],
- example="*/5 * * * *",
allow_none=False,
required=True,
)
timezone = fields.String(
- description=timezone_description,
- default="UTC",
+ metadata={"description": timezone_description},
+ dump_default="UTC",
validate=validate.OneOf(choices=tuple(all_timezones)),
)
sql = fields.String(
- description=sql_description, example="SELECT value FROM time_series_table"
+ metadata={
+ "description": sql_description,
+ "example": "SELECT value FROM time_series_table",
+ }
)
chart = fields.Integer(required=False, allow_none=True)
creation_method = EnumField(
ReportCreationMethod,
by_value=True,
required=False,
- description=creation_method_description,
+ metadata={"description": creation_method_description},
)
dashboard = fields.Integer(required=False, allow_none=True)
selected_tabs = fields.List(fields.Integer(), required=False, allow_none=True)
database = fields.Integer(required=False)
- owners = fields.List(fields.Integer(description=owners_description))
+ owners = fields.List(fields.Integer(metadata={"description": owners_description}))
validator_type = fields.String(
- description=validator_type_description,
+ metadata={"description": validator_type_description},
validate=validate.OneOf(
choices=tuple(key.value for key in ReportScheduleValidatorType)
),
)
validator_config_json = fields.Nested(ValidatorConfigJSONSchema)
log_retention = fields.Integer(
- description=log_retention_description,
- example=90,
+ metadata={"description": log_retention_description, "example": 90},
validate=[Range(min=1, error=_("Value must be greater than 0"))],
)
grace_period = fields.Integer(
- description=grace_period_description,
- example=60 * 60 * 4,
- default=60 * 60 * 4,
+ metadata={"description": grace_period_description, "example": 60 * 60 * 4},
+ dump_default=60 * 60 * 4,
validate=[Range(min=1, error=_("Value must be greater than 0"))],
)
working_timeout = fields.Integer(
- description=working_timeout_description,
- example=60 * 60 * 1,
- default=60 * 60 * 1,
+ metadata={"description": working_timeout_description, "example": 60 * 60 * 1},
+ dump_default=60 * 60 * 1,
validate=[Range(min=1, error=_("Value must be greater than 0"))],
)
recipients = fields.List(fields.Nested(ReportRecipientSchema))
report_format = fields.String(
- default=ReportDataFormat.VISUALIZATION,
+ dump_default=ReportDataFormat.VISUALIZATION,
validate=validate.OneOf(choices=tuple(key.value for key in ReportDataFormat)),
)
extra = fields.Dict(
- default=None,
+ dump_default=None,
)
- force_screenshot = fields.Boolean(default=False)
+ force_screenshot = fields.Boolean(dump_default=False)
@validates_schema
def validate_report_references( # pylint: disable=unused-argument,no-self-use
@@ -221,36 +223,44 @@ class ReportSchedulePostSchema(Schema):
class ReportSchedulePutSchema(Schema):
type = fields.String(
- description=type_description,
+ metadata={"description": type_description},
required=False,
validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)),
)
name = fields.String(
- description=name_description, required=False, validate=[Length(1, 150)]
+ metadata={"description": name_description},
+ required=False,
+ validate=[Length(1, 150)],
)
description = fields.String(
- description=description_description,
+ metadata={
+ "description": description_description,
+ "example": "Daily sales dashboard to marketing",
+ },
allow_none=True,
required=False,
- example="Daily sales dashboard to marketing",
)
context_markdown = fields.String(
- description=context_markdown_description, allow_none=True, required=False
+ metadata={"description": context_markdown_description},
+ allow_none=True,
+ required=False,
)
active = fields.Boolean(required=False)
crontab = fields.String(
- description=crontab_description,
+ metadata={"description": crontab_description},
validate=[validate_crontab, Length(1, 1000)],
required=False,
)
timezone = fields.String(
- description=timezone_description,
- default="UTC",
+ metadata={"description": timezone_description},
+ dump_default="UTC",
validate=validate.OneOf(choices=tuple(all_timezones)),
)
sql = fields.String(
- description=sql_description,
- example="SELECT value FROM time_series_table",
+ metadata={
+ "description": sql_description,
+ "example": "SELECT value FROM time_series_table",
+ },
required=False,
allow_none=True,
)
@@ -259,13 +269,15 @@ class ReportSchedulePutSchema(Schema):
ReportCreationMethod,
by_value=True,
allow_none=True,
- description=creation_method_description,
+ metadata={"description": creation_method_description},
)
dashboard = fields.Integer(required=False, allow_none=True)
database = fields.Integer(required=False)
- owners = fields.List(fields.Integer(description=owners_description), required=False)
+ owners = fields.List(
+ fields.Integer(metadata={"description": owners_description}), required=False
+ )
validator_type = fields.String(
- description=validator_type_description,
+ metadata={"description": validator_type_description},
validate=validate.OneOf(
choices=tuple(key.value for key in ReportScheduleValidatorType)
),
@@ -274,28 +286,25 @@ class ReportSchedulePutSchema(Schema):
)
validator_config_json = fields.Nested(ValidatorConfigJSONSchema, required=False)
log_retention = fields.Integer(
- description=log_retention_description,
- example=90,
+ metadata={"description": log_retention_description, "example": 90},
required=False,
validate=[Range(min=1, error=_("Value must be greater than 0"))],
)
grace_period = fields.Integer(
- description=grace_period_description,
- example=60 * 60 * 4,
+ metadata={"description": grace_period_description, "example": 60 * 60 * 4},
required=False,
validate=[Range(min=1, error=_("Value must be greater than 0"))],
)
working_timeout = fields.Integer(
- description=working_timeout_description,
- example=60 * 60 * 1,
+ metadata={"description": working_timeout_description, "example": 60 * 60 * 1},
allow_none=True,
required=False,
validate=[Range(min=1, error=_("Value must be greater than 0"))],
)
recipients = fields.List(fields.Nested(ReportRecipientSchema), required=False)
report_format = fields.String(
- default=ReportDataFormat.VISUALIZATION,
+ dump_default=ReportDataFormat.VISUALIZATION,
validate=validate.OneOf(choices=tuple(key.value for key in ReportDataFormat)),
)
- extra = fields.Dict(default=None)
- force_screenshot = fields.Boolean(default=False)
+ extra = fields.Dict(dump_default=None)
+ force_screenshot = fields.Boolean(dump_default=False)
diff --git a/superset/sqllab/schemas.py b/superset/sqllab/schemas.py
index 134b9ea7bb..30274d6de9 100644
--- a/superset/sqllab/schemas.py
+++ b/superset/sqllab/schemas.py
@@ -26,12 +26,18 @@ sql_lab_get_results_schema = {
class EstimateQueryCostSchema(Schema):
- database_id = fields.Integer(required=True, description="The database id")
- sql = fields.String(required=True, description="The SQL query to estimate")
+ database_id = fields.Integer(
+ required=True, metadata={"description": "The database id"}
+ )
+ sql = fields.String(
+ required=True, metadata={"description": "The SQL query to estimate"}
+ )
template_params = fields.Dict(
- keys=fields.String(), description="The SQL query template params"
+ keys=fields.String(), metadata={"description": "The SQL query template params"}
+ )
+ schema = fields.String(
+ allow_none=True, metadata={"description": "The database schema"}
)
- schema = fields.String(allow_none=True, description="The database schema")
class ExecutePayloadSchema(Schema):
diff --git a/superset/temporary_cache/schemas.py b/superset/temporary_cache/schemas.py
index 474d88b5fc..024c907e91 100644
--- a/superset/temporary_cache/schemas.py
+++ b/superset/temporary_cache/schemas.py
@@ -23,7 +23,7 @@ class TemporaryCachePostSchema(Schema):
value = fields.String(
required=True,
allow_none=False,
- description="Any type of JSON supported text.",
+ metadata={"description": "Any type of JSON supported text."},
validate=validate_json,
)
@@ -32,6 +32,6 @@ class TemporaryCachePutSchema(Schema):
value = fields.String(
required=True,
allow_none=False,
- description="Any type of JSON supported text.",
+ metadata={"description": "Any type of JSON supported text."},
validate=validate_json,
)
diff --git a/superset/views/base_api.py b/superset/views/base_api.py
index 0b7448b958..45f13e24ba 100644
--- a/superset/views/base_api.py
+++ b/superset/views/base_api.py
@@ -57,22 +57,28 @@ get_related_schema = {
class RelatedResultResponseSchema(Schema):
- value = fields.Integer(description="The related item identifier")
- text = fields.String(description="The related item string representation")
- extra = fields.Dict(description="The extra metadata for related item")
+ value = fields.Integer(metadata={"description": "The related item identifier"})
+ text = fields.String(
+ metadata={"description": "The related item string representation"}
+ )
+ extra = fields.Dict(metadata={"description": "The extra metadata for related item"})
class RelatedResponseSchema(Schema):
- count = fields.Integer(description="The total number of related values")
+ count = fields.Integer(
+ metadata={"description": "The total number of related values"}
+ )
result = fields.List(fields.Nested(RelatedResultResponseSchema))
class DistinctResultResponseSchema(Schema):
- text = fields.String(description="The distinct item")
+ text = fields.String(metadata={"description": "The distinct item"})
class DistincResponseSchema(Schema):
- count = fields.Integer(description="The total number of distinct values")
+ count = fields.Integer(
+ metadata={"description": "The total number of distinct values"}
+ )
result = fields.List(fields.Nested(DistinctResultResponseSchema))
diff --git a/superset/views/datasource/schemas.py b/superset/views/datasource/schemas.py
index f9be7a7d4e..b71b3defa8 100644
--- a/superset/views/datasource/schemas.py
+++ b/superset/views/datasource/schemas.py
@@ -70,7 +70,7 @@ class SamplesPayloadSchema(Schema):
)
extras = fields.Nested(
ChartDataExtrasSchema,
- description="Extra parameters to add to the query.",
+ metadata={"description": "Extra parameters to add to the query."},
allow_none=True,
)
diff --git a/superset/views/log/schemas.py b/superset/views/log/schemas.py
index bb4569893d..303af1e7a9 100644
--- a/superset/views/log/schemas.py
+++ b/superset/views/log/schemas.py
@@ -28,18 +28,27 @@ get_recent_activity_schema = {
class RecentActivitySchema(Schema):
- action = fields.String(description="Action taken describing type of activity")
- item_type = fields.String(description="Type of item, e.g. slice or dashboard")
- item_url = fields.String(description="URL to item")
- item_title = fields.String(description="Title of item")
- time = fields.Float(description="Time of activity, in epoch milliseconds")
+ action = fields.String(
+ metadata={"description": "Action taken describing type of activity"}
+ )
+ item_type = fields.String(
+ metadata={"description": "Type of item, e.g. slice or dashboard"}
+ )
+ item_url = fields.String(metadata={"description": "URL to item"})
+ item_title = fields.String(metadata={"description": "Title of item"})
+ time = fields.Float(
+ metadata={"description": "Time of activity, in epoch milliseconds"}
+ )
time_delta_humanized = fields.String(
- description="Human-readable description of how long ago activity took place"
+ metadata={
+ "description": "Human-readable description of how long ago activity took "
+ "place."
+ }
)
class RecentActivityResponseSchema(Schema):
result = fields.List(
fields.Nested(RecentActivitySchema),
- description="A list of recent activity objects",
+ metadata={"description": "A list of recent activity objects"},
)