Posted to commits@superset.apache.org by mi...@apache.org on 2023/06/09 18:33:50 UTC

[superset] branch master updated: chore(druid): Remove legacy Druid NoSQL logic (#23997)

This is an automated email from the ASF dual-hosted git repository.

michaelsmolina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 9adb023880 chore(druid): Remove legacy Druid NoSQL logic (#23997)
9adb023880 is described below

commit 9adb023880c199432a1a683c4f6e228f98cd7eee
Author: John Bodley <45...@users.noreply.github.com>
AuthorDate: Fri Jun 9 11:33:23 2023 -0700

    chore(druid): Remove legacy Druid NoSQL logic (#23997)
---
 CONTRIBUTING.md                                    | 12 ++--
 RESOURCES/STANDARD_ROLES.md                        | 27 ---------
 docs/docs/miscellaneous/chart-params.mdx           | 12 ++--
 .../importing-exporting-datasources.mdx            | 18 +-----
 docs/static/resources/openapi.json                 | 22 +-------
 .../superset-ui-chart-controls/src/types.ts        |  1 -
 .../superset-ui-core/src/query/buildQueryObject.ts |  2 +-
 .../superset-ui-core/src/query/types/Query.ts      |  6 +-
 .../test/query/buildQueryObject.test.ts            |  9 ---
 .../legacy-preset-chart-deckgl/Arc/payload.js      |  1 -
 .../legacy-preset-chart-deckgl/Grid/payload.js     |  1 -
 .../legacy-preset-chart-deckgl/Hex/payload.js      |  1 -
 .../legacy-preset-chart-deckgl/Path/payload.js     |  1 -
 .../Polygon/geojsonPayload.js                      |  1 -
 .../legacy-preset-chart-deckgl/Polygon/payload.js  |  1 -
 .../legacy-preset-chart-deckgl/Scatter/payload.js  |  1 -
 .../Screengrid/payload.js                          |  1 -
 .../src/components/AlteredSliceTag/index.jsx       |  2 +-
 .../FiltersConfigForm/FilterScope/utils.test.ts    |  4 --
 .../dashboard/util/getFilterConfigsFromFormdata.js |  8 ---
 superset-frontend/src/explore/constants.ts         |  2 -
 .../src/explore/controlPanels/Separator.js         |  3 -
 .../src/visualizations/FilterBox/FilterBox.jsx     | 13 +----
 .../src/visualizations/FilterBox/transformProps.ts |  4 --
 superset/charts/post_processing.py                 |  3 -
 superset/charts/schemas.py                         | 33 +----------
 superset/common/query_object.py                    |  3 +-
 superset/config.py                                 | 12 ----
 superset/constants.py                              |  1 -
 superset/security/manager.py                       |  9 +--
 superset/utils/core.py                             | 64 ++++------------------
 superset/utils/dict_import_export.py               |  2 +-
 superset/views/core.py                             |  2 +-
 superset/views/utils.py                            |  4 +-
 superset/viz.py                                    | 41 ++++----------
 tests/common/query_context_generator.py            |  3 +-
 tests/integration_tests/access_tests.py            |  5 --
 tests/integration_tests/charts/data/api_tests.py   |  1 -
 .../integration_tests/fixtures/energy_dashboard.py |  2 +-
 tests/integration_tests/fixtures/importexport.py   |  2 +-
 ...baba_rm_time_range_endpoints_from_qc_3__test.py |  2 -
 tests/integration_tests/utils_tests.py             | 39 +++----------
 42 files changed, 58 insertions(+), 323 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c8c699a926..e36363a7a5 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1403,13 +1403,11 @@ Note not all fields are correctly categorized. The fields vary based on visualiz
 
 ### Time
 
-| Field               | Type     | Notes                                 |
-| ------------------- | -------- | ------------------------------------- |
-| `druid_time_origin` | _string_ | The Druid **Origin** widget           |
-| `granularity`       | _string_ | The Druid **Time Granularity** widget |
-| `granularity_sqla`  | _string_ | The SQLA **Time Column** widget       |
-| `time_grain_sqla`   | _string_ | The SQLA **Time Grain** widget        |
-| `time_range`        | _string_ | The **Time range** widget             |
+| Field              | Type     | Notes                                 |
+| ------------------ | -------- | ------------------------------------- |
+| `granularity_sqla` | _string_ | The SQLA **Time Column** widget       |
+| `time_grain_sqla`  | _string_ | The SQLA **Time Grain** widget        |
+| `time_range`       | _string_ | The **Time range** widget             |
 
 ### GROUP BY
 
diff --git a/RESOURCES/STANDARD_ROLES.md b/RESOURCES/STANDARD_ROLES.md
index ad67a68159..b247585396 100644
--- a/RESOURCES/STANDARD_ROLES.md
+++ b/RESOURCES/STANDARD_ROLES.md
@@ -197,30 +197,6 @@
 |can add on AccessRequestsModelView|:heavy_check_mark:|O|O|O|
 |can delete on AccessRequestsModelView|:heavy_check_mark:|O|O|O|
 |muldelete on AccessRequestsModelView|:heavy_check_mark:|O|O|O|
-|can edit on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can list on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can show on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can add on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can delete on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|muldelete on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|yaml export on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can edit on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|can list on DruidClusterModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can show on DruidClusterModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can add on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|can delete on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|muldelete on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|yaml export on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|can list on DruidMetricInlineView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can add on DruidMetricInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can delete on DruidMetricInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can edit on DruidMetricInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can list on DruidColumnInlineView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can add on DruidColumnInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can delete on DruidColumnInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can edit on DruidColumnInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can refresh datasources on Druid|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can scan new datasources on Druid|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Row Level Security|:heavy_check_mark:|O|O|O|
 |menu access on Access requests|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Home|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -229,10 +205,7 @@
 |menu access on Chart Emails|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Alerts|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Alerts & Report|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|menu access on Druid Datasources|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|menu access on Druid Clusters|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Scan New Datasources|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|menu access on Refresh Druid Metadata|:heavy_check_mark:|O|O|O|
 |can share dashboard on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |can share chart on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |can list on FilterSets|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
diff --git a/docs/docs/miscellaneous/chart-params.mdx b/docs/docs/miscellaneous/chart-params.mdx
index e7b8e26dc1..1cdf726e39 100644
--- a/docs/docs/miscellaneous/chart-params.mdx
+++ b/docs/docs/miscellaneous/chart-params.mdx
@@ -26,13 +26,11 @@ Note not all fields are correctly categorized. The fields vary based on visualiz
 
 ### Time
 
-| Field               | Type     | Notes                                 |
-| ------------------- | -------- | ------------------------------------- |
-| `druid_time_origin` | _string_ | The Druid **Origin** widget           |
-| `granularity`       | _string_ | The Druid **Time Granularity** widget |
-| `granularity_sqla`  | _string_ | The SQLA **Time Column** widget       |
-| `time_grain_sqla`   | _string_ | The SQLA **Time Grain** widget        |
-| `time_range`        | _string_ | The **Time range** widget             |
+| Field              | Type     | Notes                                 |
+| ------------------ | -------- | ------------------------------------- |
+| `granularity_sqla` | _string_ | The SQLA **Time Column** widget       |
+| `time_grain_sqla`  | _string_ | The SQLA **Time Grain** widget        |
+| `time_range`       | _string_ | The **Time range** widget             |
 
 ### GROUP BY
 
diff --git a/docs/docs/miscellaneous/importing-exporting-datasources.mdx b/docs/docs/miscellaneous/importing-exporting-datasources.mdx
index 9083cdd50e..c2f8e6460c 100644
--- a/docs/docs/miscellaneous/importing-exporting-datasources.mdx
+++ b/docs/docs/miscellaneous/importing-exporting-datasources.mdx
@@ -8,7 +8,7 @@ version: 1
 ## Importing and Exporting Datasources
 
 The superset cli allows you to import and export datasources from and to YAML. Datasources include
-both databases and druid clusters. The data is expected to be organized in the following hierarchy:
+databases. The data is expected to be organized in the following hierarchy:
 
 ```
 ├──databases
@@ -24,19 +24,6 @@ both databases and druid clusters. The data is expected to be organized in the f
 |  |  |     └──... (more metrics)
 |  |  └── ... (more tables)
 |  └── ... (more databases)
-└──druid_clusters
-   ├──cluster_1
-   |  ├──datasource_1
-   |  |  ├──columns
-   |  |  |  ├──column_1
-   |  |  |  ├──column_2
-   |  |  |  └──... (more columns)
-   |  |  └──metrics
-   |  |     ├──metric_1
-   |  |     ├──metric_2
-   |  |     └──... (more metrics)
-   |  └── ... (more datasources)
-   └── ... (more clusters)
 ```
 
 ### Exporting Datasources to YAML
@@ -59,8 +46,7 @@ references to be included (e.g. a column to include the table id it belongs to)
 Alternatively, you can export datasources using the UI:
 
 1. Open **Sources -> Databases** to export all tables associated to a single or multiple databases.
-   (**Tables** for one or more tables, **Druid Clusters** for clusters, **Druid Datasources** for
-   datasources)
+   (**Tables** for one or more tables)
 2. Select the items you would like to export.
 3. Click **Actions -> Export** to YAML
 4. If you want to import an item that you exported through the UI, you will need to nest it inside
diff --git a/docs/static/resources/openapi.json b/docs/static/resources/openapi.json
index 6b6e458864..c52592aae9 100644
--- a/docs/static/resources/openapi.json
+++ b/docs/static/resources/openapi.json
@@ -850,13 +850,6 @@
             "description": "HAVING clause to be added to aggregate queries using AND operator.",
             "type": "string"
           },
-          "having_druid": {
-            "description": "HAVING filters to be added to legacy Druid datasource queries. This field is deprecated",
-            "items": {
-              "$ref": "#/components/schemas/ChartDataFilter"
-            },
-            "type": "array"
-          },
           "relative_end": {
             "description": "End time for relative time deltas. Default: `config[\"DEFAULT_RELATIVE_START_TIME\"]`",
             "enum": ["today", "now"],
@@ -1228,11 +1221,6 @@
             ],
             "nullable": true
           },
-          "druid_time_origin": {
-            "description": "Starting point for time grain counting on legacy Druid datasources. Used to change e.g. Monday/Sunday first-day-of-week. This field is deprecated and should be passed to `extras` as `druid_time_origin`.",
-            "nullable": true,
-            "type": "string"
-          },
           "extras": {
             "allOf": [
               {
@@ -1250,7 +1238,7 @@
             "type": "array"
           },
           "granularity": {
-            "description": "Name of temporal column used for time filtering. For legacy Druid datasources this defines the time grain.",
+            "description": "Name of temporal column used for time filtering.
             "nullable": true,
             "type": "string"
           },
@@ -1270,14 +1258,6 @@
             "nullable": true,
             "type": "string"
           },
-          "having_filters": {
-            "description": "HAVING filters to be added to legacy Druid datasource queries. This field is deprecated and should be passed to `extras` as `having_druid`.",
-            "items": {
-              "$ref": "#/components/schemas/ChartDataFilter"
-            },
-            "nullable": true,
-            "type": "array"
-          },
           "is_rowcount": {
             "description": "Should the rowcount of the actual query be returned",
             "nullable": true,
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/types.ts b/superset-frontend/packages/superset-ui-chart-controls/src/types.ts
index c26f53b6a2..8ba428d7bc 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/types.ts
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/types.ts
@@ -334,7 +334,6 @@ export type SharedSectionAlias =
   | 'annotations'
   | 'colorScheme'
   | 'datasourceAndVizType'
-  | 'druidTimeSeries'
   | 'sqlaTimeSeries'
   | 'NVD3TimeSeries';
 
diff --git a/superset-frontend/packages/superset-ui-core/src/query/buildQueryObject.ts b/superset-frontend/packages/superset-ui-core/src/query/buildQueryObject.ts
index cf434f1385..725c2c87aa 100644
--- a/superset-frontend/packages/superset-ui-core/src/query/buildQueryObject.ts
+++ b/superset-frontend/packages/superset-ui-core/src/query/buildQueryObject.ts
@@ -37,7 +37,7 @@ import { isDefined } from '../utils';
 
 /**
  * Build the common segments of all query objects (e.g. the granularity field derived from
- * either sql alchemy or druid). The segments specific to each viz type is constructed in the
+ * SQLAlchemy). The segments specific to each viz type is constructed in the
  * buildQuery method for each viz type (see `wordcloud/buildQuery.ts` for an example).
  * Note the type of the formData argument passed in here is the type of the formData for a
  * specific viz, which is a subtype of the generic formData shared among all viz types.
diff --git a/superset-frontend/packages/superset-ui-core/src/query/types/Query.ts b/superset-frontend/packages/superset-ui-core/src/query/types/Query.ts
index 8a7e403703..d71928420c 100644
--- a/superset-frontend/packages/superset-ui-core/src/query/types/Query.ts
+++ b/superset-frontend/packages/superset-ui-core/src/query/types/Query.ts
@@ -61,7 +61,6 @@ export type QueryObjectFilterClause =
   | UnaryQueryObjectFilterClause;
 
 export type QueryObjectExtras = Partial<{
-  /** HAVING condition for Druid */
   /** HAVING condition for SQLAlchemy */
   having?: string;
   relative_start?: string;
@@ -107,7 +106,7 @@ export interface QueryObject
   /** SIMPLE where filters */
   filters?: QueryObjectFilterClause[];
 
-  /** Time column for SQL, time-grain for Druid (deprecated) */
+  /** Time column for SQL */
   granularity?: string;
 
   /** If set, will group by timestamp */
@@ -119,9 +118,6 @@ export interface QueryObject
   /** Free-form HAVING SQL, multiple clauses are concatenated by AND */
   having?: string;
 
-  /** SIMPLE having filters */
-  having_filters?: QueryObjectFilterClause[];
-
   post_processing?: (PostProcessingRule | undefined)[];
 
   /** Maximum numbers of rows to return */
diff --git a/superset-frontend/packages/superset-ui-core/test/query/buildQueryObject.test.ts b/superset-frontend/packages/superset-ui-core/test/query/buildQueryObject.test.ts
index cdabcff57e..187fb22a74 100644
--- a/superset-frontend/packages/superset-ui-core/test/query/buildQueryObject.test.ts
+++ b/superset-frontend/packages/superset-ui-core/test/query/buildQueryObject.test.ts
@@ -39,15 +39,6 @@ describe('buildQueryObject', () => {
     expect(query.granularity).toEqual('ds');
   });
 
-  it('should build granularity for druid datasources', () => {
-    query = buildQueryObject({
-      datasource: '5__druid',
-      granularity: 'ds',
-      viz_type: 'table',
-    });
-    expect(query.granularity).toEqual('ds');
-  });
-
   it('should build metrics based on default queryFields', () => {
     query = buildQueryObject({
       datasource: '5__table',
diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Arc/payload.js b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Arc/payload.js
index b3a3f6c66a..12cc3a1f1d 100644
--- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Arc/payload.js
+++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Arc/payload.js
@@ -117,7 +117,6 @@ export default {
     js_columns: [],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       {
         col: 'LATITUDE',
diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Grid/payload.js b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Grid/payload.js
index 7ce02ecf0c..379ce12041 100644
--- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Grid/payload.js
+++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Grid/payload.js
@@ -69,7 +69,6 @@ export default {
     ],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       { col: 'LAT', op: 'IS NOT NULL', val: '' },
       { col: 'LON', op: 'IS NOT NULL', val: '' },
diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Hex/payload.js b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Hex/payload.js
index 7ce02ecf0c..379ce12041 100644
--- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Hex/payload.js
+++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Hex/payload.js
@@ -69,7 +69,6 @@ export default {
     ],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       { col: 'LAT', op: 'IS NOT NULL', val: '' },
       { col: 'LON', op: 'IS NOT NULL', val: '' },
diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Path/payload.js b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Path/payload.js
index a303f8ac08..5a36d7e32c 100644
--- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Path/payload.js
+++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Path/payload.js
@@ -70,7 +70,6 @@ export const payload = theme => ({
     js_columns: ['color'],
     where: '',
     having: '',
-    having_filters: [],
     filters: [{ col: 'path_json', op: 'IS NOT NULL', val: '' }],
   },
   is_cached: false,
diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Polygon/geojsonPayload.js b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Polygon/geojsonPayload.js
index eb37db0168..948061a677 100644
--- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Polygon/geojsonPayload.js
+++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Polygon/geojsonPayload.js
@@ -84,7 +84,6 @@ export default {
     js_columns: [],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       {
         col: 'geometry',
diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Polygon/payload.js b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Polygon/payload.js
index 24926529f9..b4c9f36a76 100644
--- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Polygon/payload.js
+++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Polygon/payload.js
@@ -84,7 +84,6 @@ export default {
     js_columns: ['population', 'area'],
     where: '',
     having: '',
-    having_filters: [],
     filters: [{ col: 'contour', op: 'IS NOT NULL', val: '' }],
   },
   is_cached: false,
diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Scatter/payload.js b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Scatter/payload.js
index e5c5be625e..11f42446de 100644
--- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Scatter/payload.js
+++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Scatter/payload.js
@@ -67,7 +67,6 @@ export default {
     ],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       { col: 'LAT', op: 'IS NOT NULL', val: '' },
       { col: 'LON', op: 'IS NOT NULL', val: '' },
diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Screengrid/payload.js b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Screengrid/payload.js
index 6b01b369a2..a9c9bfc778 100644
--- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Screengrid/payload.js
+++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/legacy-preset-chart-deckgl/Screengrid/payload.js
@@ -68,7 +68,6 @@ export default {
     ],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       { col: 'LAT', op: 'IS NOT NULL', val: '' },
       { col: 'LON', op: 'IS NOT NULL', val: '' },
diff --git a/superset-frontend/src/components/AlteredSliceTag/index.jsx b/superset-frontend/src/components/AlteredSliceTag/index.jsx
index 5c9b04949b..83458fc0a4 100644
--- a/superset-frontend/src/components/AlteredSliceTag/index.jsx
+++ b/superset-frontend/src/components/AlteredSliceTag/index.jsx
@@ -104,7 +104,7 @@ export default class AlteredSliceTag extends React.Component {
       if (!ofd[fdKey] && !cfd[fdKey]) {
         return;
       }
-      if (['filters', 'having', 'having_filters', 'where'].includes(fdKey)) {
+      if (['filters', 'having', 'where'].includes(fdKey)) {
         return;
       }
       if (!this.isEqualish(ofd[fdKey], cfd[fdKey])) {
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/utils.test.ts b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/utils.test.ts
index cf56d63f62..5a7e0198da 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/utils.test.ts
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/utils.test.ts
@@ -765,7 +765,6 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in
             applied_time_extras: {},
             where: '',
             having: '',
-            having_filters: [],
             filters: [],
           },
           is_cached: false,
@@ -3131,7 +3130,6 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in
             applied_time_extras: {},
             where: '',
             having: '',
-            having_filters: [],
             filters: [],
           },
           is_cached: false,
@@ -16668,7 +16666,6 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in
             applied_time_extras: {},
             where: '',
             having: '',
-            having_filters: [],
             filters: [
               {
                 col: 'rank',
@@ -17723,7 +17720,6 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in
             applied_time_extras: {},
             where: '',
             having: '',
-            having_filters: [],
             filters: [],
           },
           is_cached: false,
diff --git a/superset-frontend/src/dashboard/util/getFilterConfigsFromFormdata.js b/superset-frontend/src/dashboard/util/getFilterConfigsFromFormdata.js
index 46f4512a0a..f85155c856 100644
--- a/superset-frontend/src/dashboard/util/getFilterConfigsFromFormdata.js
+++ b/superset-frontend/src/dashboard/util/getFilterConfigsFromFormdata.js
@@ -27,7 +27,6 @@ export default function getFilterConfigsFromFormdata(form_data = {}) {
   const {
     date_filter,
     filter_configs = [],
-    show_druid_time_granularity,
     show_sqla_time_column,
     show_sqla_time_granularity,
   } = form_data;
@@ -93,13 +92,6 @@ export default function getFilterConfigsFromFormdata(form_data = {}) {
       };
     }
 
-    if (show_druid_time_granularity) {
-      updatedColumns = {
-        ...updatedColumns,
-        [TIME_FILTER_MAP.granularity]: form_data.granularity,
-      };
-    }
-
     configs = {
       ...configs,
       columns: updatedColumns,
diff --git a/superset-frontend/src/explore/constants.ts b/superset-frontend/src/explore/constants.ts
index e01dae4935..e539c16de7 100644
--- a/superset-frontend/src/explore/constants.ts
+++ b/superset-frontend/src/explore/constants.ts
@@ -125,8 +125,6 @@ export const sqlaAutoGeneratedMetricNameRegex =
   /^(sum|min|max|avg|count|count_distinct)__.*$/i;
 export const sqlaAutoGeneratedMetricRegex =
   /^(LONG|DOUBLE|FLOAT)?(SUM|AVG|MAX|MIN|COUNT)\([A-Z0-9_."]*\)$/i;
-export const druidAutoGeneratedMetricRegex =
-  /^(LONG|DOUBLE|FLOAT)?(SUM|MAX|MIN|COUNT)\([A-Z0-9_."]*\)$/i;
 
 export const TIME_FILTER_LABELS = {
   time_range: t('Time range'),
diff --git a/superset-frontend/src/explore/controlPanels/Separator.js b/superset-frontend/src/explore/controlPanels/Separator.js
index 2be8594690..d49d389b90 100644
--- a/superset-frontend/src/explore/controlPanels/Separator.js
+++ b/superset-frontend/src/explore/controlPanels/Separator.js
@@ -69,9 +69,6 @@ export default {
     },
   },
   sectionOverrides: {
-    druidTimeSeries: {
-      controlSetRows: [],
-    },
     sqlaTimeSeries: {
       controlSetRows: [],
     },
diff --git a/superset-frontend/src/visualizations/FilterBox/FilterBox.jsx b/superset-frontend/src/visualizations/FilterBox/FilterBox.jsx
index f7e816bcaf..a2b2a9a282 100644
--- a/superset-frontend/src/visualizations/FilterBox/FilterBox.jsx
+++ b/superset-frontend/src/visualizations/FilterBox/FilterBox.jsx
@@ -158,7 +158,7 @@ class FilterBox extends React.PureComponent {
   getControlData(controlName) {
     const { selectedValues } = this.state;
     const control = {
-      ...controls[controlName], // TODO: make these controls ('druid_time_origin', 'granularity', 'granularity_sqla', 'time_grain_sqla') accessible from getControlsForVizType.
+      ...controls[controlName], // TODO: make these controls ('granularity_sqla', 'time_grain_sqla') accessible from getControlsForVizType.
       name: controlName,
       key: `control-${controlName}`,
       value: selectedValues[TIME_FILTER_MAP[controlName]],
@@ -324,7 +324,6 @@ class FilterBox extends React.PureComponent {
     const { showSqlaTimeGrain, showSqlaTimeColumn } = this.props;
     const datasourceFilters = [];
     const sqlaFilters = [];
-    const druidFilters = [];
     if (showSqlaTimeGrain) sqlaFilters.push('time_grain_sqla');
     if (showSqlaTimeColumn) sqlaFilters.push('granularity_sqla');
     if (sqlaFilters.length) {
@@ -337,16 +336,6 @@ class FilterBox extends React.PureComponent {
         />,
       );
     }
-    if (druidFilters.length) {
-      datasourceFilters.push(
-        <ControlRow
-          key="druid-filters"
-          controls={druidFilters.map(control => (
-            <Control {...this.getControlData(control)} />
-          ))}
-        />,
-      );
-    }
     return datasourceFilters;
   }
 
diff --git a/superset-frontend/src/visualizations/FilterBox/transformProps.ts b/superset-frontend/src/visualizations/FilterBox/transformProps.ts
index 19a962ddd1..59387edae1 100644
--- a/superset-frontend/src/visualizations/FilterBox/transformProps.ts
+++ b/superset-frontend/src/visualizations/FilterBox/transformProps.ts
@@ -41,8 +41,6 @@ export default function transformProps(chartProps: FilterBoxChartProps) {
     sliceId,
     dateFilter,
     instantFiltering,
-    showDruidTimeGranularity,
-    showDruidTimeOrigin,
     showSqlaTimeColumn,
     showSqlaTimeGranularity,
   } = formData;
@@ -68,8 +66,6 @@ export default function transformProps(chartProps: FilterBoxChartProps) {
     onFilterMenuClose,
     origSelectedValues: initialValues || {},
     showDateFilter: dateFilter,
-    showDruidTimeGrain: showDruidTimeGranularity,
-    showDruidTimeOrigin,
     showSqlaTimeColumn,
     showSqlaTimeGrain: showSqlaTimeGranularity,
     // the original form data, needed for async select options
diff --git a/superset/charts/post_processing.py b/superset/charts/post_processing.py
index 2ee03a3b66..398df55828 100644
--- a/superset/charts/post_processing.py
+++ b/superset/charts/post_processing.py
@@ -34,7 +34,6 @@ from flask_babel import gettext as __
 
 from superset.common.chart_data import ChartDataResultFormat
 from superset.utils.core import (
-    DTTM_ALIAS,
     extract_dataframe_dtypes,
     get_column_names,
     get_metric_names,
@@ -230,8 +229,6 @@ def pivot_table_v2(
     Pivot table v2.
     """
     verbose_map = datasource.data["verbose_map"] if datasource else None
-    if form_data.get("granularity_sqla") == "all" and DTTM_ALIAS in df:
-        del df[DTTM_ALIAS]
 
     return pivot_df(
         df,
diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py
index 9010840bfd..0ba4084c89 100644
--- a/superset/charts/schemas.py
+++ b/superset/charts/schemas.py
@@ -981,14 +981,6 @@ class ChartDataExtrasSchema(Schema):
             "AND operator."
         },
     )
-    having_druid = fields.List(
-        fields.Nested(ChartDataFilterSchema),
-        metadata={
-            "description": "HAVING filters to be added to legacy Druid datasource "
-            "queries. This field is deprecated",
-            "deprecated": True,
-        },
-    )
     time_grain_sqla = fields.String(
         metadata={
             "description": "To what level of granularity should the temporal column be "
@@ -1159,10 +1151,7 @@ class ChartDataQueryObjectSchema(Schema):
     )
     filters = fields.List(fields.Nested(ChartDataFilterSchema), allow_none=True)
     granularity = fields.String(
-        metadata={
-            "description": "Name of temporal column used for time filtering. "
-            "For legacy Druid datasources this defines the time grain."
-        },
+        metadata={"description": "Name of temporal column used for time filtering. "},
         allow_none=True,
     )
     granularity_sqla = fields.String(
@@ -1341,26 +1330,6 @@ class ChartDataQueryObjectSchema(Schema):
         },
         allow_none=True,
     )
-    having_filters = fields.List(
-        fields.Nested(ChartDataFilterSchema),
-        metadata={
-            "description": "HAVING filters to be added to legacy Druid datasource "
-            "queries. This field is deprecated and should be passed to `extras` "
-            "as `having_druid`.",
-            "deprecated": True,
-        },
-        allow_none=True,
-    )
-    druid_time_origin = fields.String(
-        metadata={
-            "description": "Starting point for time grain counting on legacy Druid "
-            "datasources. Used to change e.g. Monday/Sunday first-day-of-week. "
-            "This field is deprecated and should be passed to `extras` "
-            "as `druid_time_origin`.",
-            "deprecated": True,
-        },
-        allow_none=True,
-    )
     url_params = fields.Dict(
         metadata={
             "description": "Optional query parameters passed to a dashboard or Explore "
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index dc02b774e5..1e826761ec 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -77,8 +77,7 @@ DEPRECATED_EXTRAS_FIELDS = (
 
 class QueryObject:  # pylint: disable=too-many-instance-attributes
     """
-    The query object's schema matches the interfaces of DB connectors like sqla
-    and druid. The query objects are constructed on the client.
+    The query objects are constructed on the client.
     """
 
     annotation_layers: list[dict[str, Any]]
diff --git a/superset/config.py b/superset/config.py
index f9fdf3cdd5..80e7132e68 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -38,7 +38,6 @@ from typing import Any, Callable, Literal, TYPE_CHECKING, TypedDict
 import pkg_resources
 from cachelib.base import BaseCache
 from celery.schedules import crontab
-from dateutil import tz
 from flask import Blueprint
 from flask_appbuilder.security.manager import AUTH_DB
 from pandas import Series
@@ -288,17 +287,6 @@ LOGO_RIGHT_TEXT: Callable[[], str] | str = ""
 # ex: http://localhost:8080/swagger/v1
 FAB_API_SWAGGER_UI = True
 
-# Druid query timezone
-# tz.tzutc() : Using utc timezone
-# tz.tzlocal() : Using local timezone
-# tz.gettz('Asia/Shanghai') : Using the time zone with specific name
-# [TimeZone List]
-# See: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
-# other tz can be overridden by providing a local_config
-DRUID_TZ = tz.tzutc()
-DRUID_ANALYSIS_TYPES = ["cardinality"]
-
-
 # ----------------------------------------------------
 # AUTHENTICATION CONFIG
 # ----------------------------------------------------
diff --git a/superset/constants.py b/superset/constants.py
index f867d901cd..48b08506ca 100644
--- a/superset/constants.py
+++ b/superset/constants.py
@@ -167,7 +167,6 @@ EXTRA_FORM_DATA_APPEND_KEYS = {
 }
 
 EXTRA_FORM_DATA_OVERRIDE_REGULAR_MAPPINGS = {
-    "granularity": "granularity",
     "granularity_sqla": "granularity",
     "time_column": "time_column",
     "time_grain": "time_grain",
diff --git a/superset/security/manager.py b/superset/security/manager.py
index 94a731a3ff..39283d9941 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -143,7 +143,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     SecurityManager
 ):
     userstatschartview = None
-    READ_ONLY_MODEL_VIEWS = {"Database", "DruidClusterModelView", "DynamicPlugin"}
+    READ_ONLY_MODEL_VIEWS = {"Database", "DynamicPlugin"}
 
     USER_MODEL_VIEWS = {
         "RegisterUserModelView",
@@ -169,7 +169,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         "Log",
         "List Users",
         "List Roles",
-        "Refresh Druid Metadata",
         "ResetPasswordView",
         "RoleModelView",
         "Row Level Security",
@@ -196,7 +195,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     }
 
     ADMIN_ONLY_PERMISSIONS = {
-        "can_sync_druid_source",
         "can_override_role_permissions",
         "can_approve",
         "can_update_role",
@@ -375,8 +373,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         """
         Return True if the user can fully access the Superset database, False otherwise.
 
-        Note for Druid the database is akin to the Druid cluster.
-
         :param database: The Superset database
         :returns: Whether the user can fully access the Superset database
         """
@@ -392,9 +388,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         Return True if the user can fully access the schema associated with the Superset
         datasource, False otherwise.
 
-        Note for Druid datasources the database and schema are akin to the Druid cluster
-        and datasource name prefix respectively, i.e., [schema.]datasource.
-
         :param datasource: The Superset datasource
         :returns: Whether the user can fully access the datasource's schema
         """
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 24e539b2b6..036414ef1d 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -102,11 +102,6 @@ from superset.utils.date_parser import parse_human_timedelta
 from superset.utils.dates import datetime_to_epoch, EPOCH
 from superset.utils.hashing import md5_sha_from_dict, md5_sha_from_str
 
-try:
-    from pydruid.utils.having import Having
-except ImportError:
-    pass
-
 if TYPE_CHECKING:
     from superset.connectors.base.models import BaseColumn, BaseDatasource
     from superset.models.sql_lab import Query
@@ -213,7 +208,6 @@ class QueryObjectFilterClause(TypedDict, total=False):
 
 
 class ExtraFiltersTimeColumnType(str, Enum):
-    GRANULARITY = "__granularity"
     TIME_COL = "__time_col"
     TIME_GRAIN = "__time_grain"
     TIME_ORIGIN = "__time_origin"
@@ -360,25 +354,6 @@ class ColumnSpec(NamedTuple):
     python_date_format: str | None = None
 
 
-try:
-    # Having might not have been imported.
-    class DimSelector(Having):
-        def __init__(self, **args: Any) -> None:
-            # Just a hack to prevent any exceptions
-            Having.__init__(self, type="equalTo", aggregation=None, value=None)
-
-            self.having = {
-                "having": {
-                    "type": "dimSelector",
-                    "dimension": args["dimension"],
-                    "value": args["value"],
-                }
-            }
-
-except NameError:
-    pass
-
-
 def flasher(msg: str, severity: str = "message") -> None:
     """Flask's flash if available, logging call if not"""
     try:
@@ -1144,11 +1119,7 @@ def merge_extra_form_data(form_data: dict[str, Any]) -> None:
                     for fltr in append_filters
                     if fltr
                 )
-    if (
-        form_data.get("time_range")
-        and not form_data.get("granularity")
-        and not form_data.get("granularity_sqla")
-    ):
+    if form_data.get("time_range") and not form_data.get("granularity_sqla"):
         for adhoc_filter in form_data.get("adhoc_filters", []):
             if adhoc_filter.get("operator") == "TEMPORAL_RANGE":
                 adhoc_filter["comparator"] = form_data["time_range"]
@@ -1172,7 +1143,6 @@ def merge_extra_filters(form_data: dict[str, Any]) -> None:
             "__time_range": "time_range",
             "__time_col": "granularity_sqla",
             "__time_grain": "time_grain_sqla",
-            "__granularity": "granularity",
         }
 
         # Grab list of existing filters 'keyed' on the column and operator
@@ -1394,21 +1364,22 @@ def ensure_path_exists(path: str) -> None:
 def convert_legacy_filters_into_adhoc(  # pylint: disable=invalid-name
     form_data: FormData,
 ) -> None:
-    mapping = {"having": "having_filters", "where": "filters"}
-
     if not form_data.get("adhoc_filters"):
         adhoc_filters: list[AdhocFilterClause] = []
         form_data["adhoc_filters"] = adhoc_filters
 
-        for clause, filters in mapping.items():
+        for clause in ("having", "where"):
             if clause in form_data and form_data[clause] != "":
                 adhoc_filters.append(form_data_to_adhoc(form_data, clause))
 
-            if filters in form_data:
-                for filt in filter(lambda x: x is not None, form_data[filters]):
-                    adhoc_filters.append(simple_filter_to_adhoc(filt, clause))
+        if "filters" in form_data:
+            adhoc_filters.extend(
+                simple_filter_to_adhoc(fltr, "where")
+                for fltr in form_data["filters"]
+                if fltr is not None
+            )
 
-    for key in ("filters", "having", "having_filters", "where"):
+    for key in ("filters", "having", "where"):
         if key in form_data:
             del form_data[key]
 
@@ -1417,15 +1388,13 @@ def split_adhoc_filters_into_base_filters(  # pylint: disable=invalid-name
     form_data: FormData,
 ) -> None:
     """
-    Mutates form data to restructure the adhoc filters in the form of the four base
-    filters, `where`, `having`, `filters`, and `having_filters` which represent
-    free form where sql, free form having sql, structured where clauses and structured
-    having clauses.
+    Mutates form data to restructure the adhoc filters in the form of the three base
+    filters, `where`, `having`, and `filters` which represent free form where sql,
+    free form having sql, and structured where clauses.
     """
     adhoc_filters = form_data.get("adhoc_filters")
     if isinstance(adhoc_filters, list):
         simple_where_filters = []
-        simple_having_filters = []
         sql_where_filters = []
         sql_having_filters = []
         for adhoc_filter in adhoc_filters:
@@ -1440,14 +1409,6 @@ def split_adhoc_filters_into_base_filters(  # pylint: disable=invalid-name
                             "val": adhoc_filter.get("comparator"),
                         }
                     )
-                elif clause == "HAVING":
-                    simple_having_filters.append(
-                        {
-                            "col": adhoc_filter.get("subject"),
-                            "op": adhoc_filter.get("operator"),
-                            "val": adhoc_filter.get("comparator"),
-                        }
-                    )
             elif expression_type == "SQL":
                 sql_expression = adhoc_filter.get("sqlExpression")
                 sql_expression = sanitize_clause(sql_expression)
@@ -1457,7 +1418,6 @@ def split_adhoc_filters_into_base_filters(  # pylint: disable=invalid-name
                     sql_having_filters.append(sql_expression)
         form_data["where"] = " AND ".join([f"({sql})" for sql in sql_where_filters])
         form_data["having"] = " AND ".join([f"({sql})" for sql in sql_having_filters])
-        form_data["having_filters"] = simple_having_filters
         form_data["filters"] = simple_where_filters
 
 
diff --git a/superset/utils/dict_import_export.py b/superset/utils/dict_import_export.py
index f3fb1bbd6c..fbd9db7d81 100644
--- a/superset/utils/dict_import_export.py
+++ b/superset/utils/dict_import_export.py
@@ -40,7 +40,7 @@ def export_schema_to_dict(back_references: bool) -> dict[str, Any]:
 def export_to_dict(
     session: Session, recursive: bool, back_references: bool, include_defaults: bool
 ) -> dict[str, Any]:
-    """Exports databases and druid clusters to a dictionary"""
+    """Exports databases to a dictionary"""
     logger.info("Starting export")
     dbs = session.query(Database)
     databases = [
diff --git a/superset/views/core.py b/superset/views/core.py
index 3b63eb74d8..9ef2e45b30 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -238,7 +238,7 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
            {
             'role_name': '{role_name}',
             'database': [{
-                'datasource_type': '{table|druid}',
+                'datasource_type': '{table}',
                 'name': '{database_name}',
                 'schema': [{
                     'name': '{schema_name}',
diff --git a/superset/views/utils.py b/superset/views/utils.py
index 9b515edc26..75ab4ebe4f 100644
--- a/superset/views/utils.py
+++ b/superset/views/utils.py
@@ -254,7 +254,7 @@ def get_datasource_info(
     This function allows supporting both without duplicating code
 
     :param datasource_id: The datasource ID
-    :param datasource_type: The datasource type, i.e., 'druid' or 'table'
+    :param datasource_type: The datasource type
     :param form_data: The URL form data
     :returns: The datasource ID and type
     :raises SupersetException: If the datasource no longer exists
@@ -468,7 +468,7 @@ def check_datasource_perms(
     This function takes `self` since it must have the same signature as the
     the decorated method.
 
-    :param datasource_type: The datasource type, i.e., 'druid' or 'table'
+    :param datasource_type: The datasource type
     :param datasource_id: The datasource ID
     :raises SupersetSecurityException: If the user cannot access the resource
     """
diff --git a/superset/viz.py b/superset/viz.py
index d68103deaa..4e39ae2a19 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -359,9 +359,7 @@ class BaseViz:  # pylint: disable=too-many-public-methods
             del groupby[groupby_labels.index(DTTM_ALIAS)]
             is_timeseries = True
 
-        granularity = self.form_data.get("granularity") or self.form_data.get(
-            "granularity_sqla"
-        )
+        granularity = self.form_data.get("granularity_sqla")
         limit = int(self.form_data.get("limit") or 0)
         timeseries_limit_metric = self.form_data.get("timeseries_limit_metric")
 
@@ -772,12 +770,8 @@ class TableViz(BaseViz):
     @deprecated(deprecated_in="3.0")
     def should_be_timeseries(self) -> bool:
         # TODO handle datasource-type-specific code in datasource
-        conditions_met = (
-            self.form_data.get("granularity")
-            and self.form_data.get("granularity") != "all"
-        ) or (
-            self.form_data.get("granularity_sqla")
-            and self.form_data.get("time_grain_sqla")
+        conditions_met = self.form_data.get("granularity_sqla") and self.form_data.get(
+            "time_grain_sqla"
         )
         if self.form_data.get("include_time") and not conditions_met:
             raise QueryObjectValidationError(
@@ -981,11 +975,9 @@ class CalHeatmapViz(BaseViz):
             "month": "P1M",
             "year": "P1Y",
         }
-        time_grain = mapping[self.form_data.get("subdomain_granularity", "min")]
-        if self.datasource.type == "druid":
-            query_obj["granularity"] = time_grain
-        else:
-            query_obj["extras"]["time_grain_sqla"] = time_grain
+        query_obj["extras"]["time_grain_sqla"] = mapping[
+            self.form_data.get("subdomain_granularity", "min")
+        ]
         return query_obj
 
 
@@ -1231,11 +1223,6 @@ class NVD3TimeSeriesViz(NVD3Viz):
 
     @deprecated(deprecated_in="3.0")
     def process_data(self, df: pd.DataFrame, aggregate: bool = False) -> VizData:
-        if self.form_data.get("granularity") == "all":
-            raise QueryObjectValidationError(
-                _("Pick a time granularity for your time series")
-            )
-
         if df.empty:
             return df
 
@@ -2398,9 +2385,7 @@ class DeckScatterViz(BaseDeckGLViz):
     @deprecated(deprecated_in="3.0")
     def query_obj(self) -> QueryObjectDict:
         # pylint: disable=attribute-defined-outside-init
-        self.is_timeseries = bool(
-            self.form_data.get("time_grain_sqla") or self.form_data.get("granularity")
-        )
+        self.is_timeseries = bool(self.form_data.get("time_grain_sqla"))
         self.point_radius_fixed = self.form_data.get("point_radius_fixed") or {
             "type": "fix",
             "value": 500,
@@ -2453,9 +2438,7 @@ class DeckScreengrid(BaseDeckGLViz):
 
     @deprecated(deprecated_in="3.0")
     def query_obj(self) -> QueryObjectDict:
-        self.is_timeseries = bool(
-            self.form_data.get("time_grain_sqla") or self.form_data.get("granularity")
-        )
+        self.is_timeseries = bool(self.form_data.get("time_grain_sqla"))
         return super().query_obj()
 
     @deprecated(deprecated_in="3.0")
@@ -2526,9 +2509,7 @@ class DeckPathViz(BaseDeckGLViz):
     @deprecated(deprecated_in="3.0")
     def query_obj(self) -> QueryObjectDict:
         # pylint: disable=attribute-defined-outside-init
-        self.is_timeseries = bool(
-            self.form_data.get("time_grain_sqla") or self.form_data.get("granularity")
-        )
+        self.is_timeseries = bool(self.form_data.get("time_grain_sqla"))
         query_obj = super().query_obj()
         self.metric = self.form_data.get("metric")
         line_col = self.form_data.get("line_column")
@@ -2675,9 +2656,7 @@ class DeckArc(BaseDeckGLViz):
 
     @deprecated(deprecated_in="3.0")
     def query_obj(self) -> QueryObjectDict:
-        self.is_timeseries = bool(
-            self.form_data.get("time_grain_sqla") or self.form_data.get("granularity")
-        )
+        self.is_timeseries = bool(self.form_data.get("time_grain_sqla"))
         return super().query_obj()
 
     @deprecated(deprecated_in="3.0")
diff --git a/tests/common/query_context_generator.py b/tests/common/query_context_generator.py
index 32b4063974..ce458b466f 100644
--- a/tests/common/query_context_generator.py
+++ b/tests/common/query_context_generator.py
@@ -22,7 +22,7 @@ from superset.common.chart_data import ChartDataResultType
 from superset.utils.core import AnnotationType, DTTM_ALIAS
 
 query_birth_names = {
-    "extras": {"where": "", "time_grain_sqla": "P1D", "having_druid": []},
+    "extras": {"where": "", "time_grain_sqla": "P1D"},
     "columns": ["name"],
     "metrics": [{"label": "sum__num"}],
     "orderby": [("sum__num", False)],
@@ -38,7 +38,6 @@ query_birth_names = {
         {"col": "name", "op": "NOT IN", "val": ["<NULL>", '"abc"']},
     ],
     "having": "",
-    "having_filters": [],
     "where": "",
 }
 
diff --git a/tests/integration_tests/access_tests.py b/tests/integration_tests/access_tests.py
index 79fdff6346..ab0100ac24 100644
--- a/tests/integration_tests/access_tests.py
+++ b/tests/integration_tests/access_tests.py
@@ -67,11 +67,6 @@ ROLE_ALL_PERM_DATA = {
             "name": "examples",
             "schema": [{"name": "", "datasources": ["birth_names"]}],
         },
-        {
-            "datasource_type": "druid",
-            "name": "druid_test",
-            "schema": [{"name": "", "datasources": ["druid_ds_1", "druid_ds_2"]}],
-        },
     ],
 }
 
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index f9e6b5e3b1..4c8d1996c9 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -959,7 +959,6 @@ class TestGetChartDataApi(BaseTestChartDataApi):
                         "filters": [],
                         "extras": {
                             "having": "",
-                            "having_druid": [],
                             "where": "",
                         },
                         "applied_time_extras": {},
diff --git a/tests/integration_tests/fixtures/energy_dashboard.py b/tests/integration_tests/fixtures/energy_dashboard.py
index 8b597bf3be..5b4690f572 100644
--- a/tests/integration_tests/fixtures/energy_dashboard.py
+++ b/tests/integration_tests/fixtures/energy_dashboard.py
@@ -188,6 +188,6 @@ def _get_energy_slices():
                 "xscale_interval": "1",
                 "yscale_interval": "1",
             },
-            "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","having_druid":[],"where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}',
+            "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}',
         },
     ]
diff --git a/tests/integration_tests/fixtures/importexport.py b/tests/integration_tests/fixtures/importexport.py
index 5fddb071e2..74237f4a82 100644
--- a/tests/integration_tests/fixtures/importexport.py
+++ b/tests/integration_tests/fixtures/importexport.py
@@ -550,7 +550,7 @@ chart_config: dict[str, Any] = {
         },
         "viz_type": "deck_path",
     },
-    "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","having_druid":[],"where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}',
+    "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}',
     "cache_timeout": None,
     "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1",
     "version": "1.0.0",
diff --git a/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py b/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py
index 716be9b1fa..aa06796701 100644
--- a/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py
+++ b/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py
@@ -35,7 +35,6 @@ sample_query_context = {
                 "time_grain_sqla": "P1D",
                 "time_range_endpoints": ["inclusive", "exclusive"],
                 "having": "",
-                "having_druid": [],
                 "where": "",
             },
             "applied_time_extras": {},
@@ -93,7 +92,6 @@ sample_query_context = {
                 "time_grain_sqla": "P1D",
                 "time_range_endpoints": ["inclusive", "exclusive"],
                 "having": "",
-                "having_druid": [],
                 "where": "",
             },
             "applied_time_extras": {},
diff --git a/tests/integration_tests/utils_tests.py b/tests/integration_tests/utils_tests.py
index b4d750c8d0..2986188ff9 100644
--- a/tests/integration_tests/utils_tests.py
+++ b/tests/integration_tests/utils_tests.py
@@ -242,7 +242,6 @@ class TestUtils(SupersetTestCase):
                 {"col": "__time_col", "op": "in", "val": "birth_year"},
                 {"col": "__time_grain", "op": "in", "val": "years"},
                 {"col": "A", "op": "like", "val": "hello"},
-                {"col": "__granularity", "op": "in", "val": "90 seconds"},
             ]
         }
         expected = {
@@ -260,12 +259,10 @@ class TestUtils(SupersetTestCase):
             "time_range": "1 year ago :",
             "granularity_sqla": "birth_year",
             "time_grain_sqla": "years",
-            "granularity": "90 seconds",
             "applied_time_extras": {
                 "__time_range": "1 year ago :",
                 "__time_col": "birth_year",
                 "__time_grain": "years",
-                "__granularity": "90 seconds",
             },
         }
         merge_extra_filters(form_data)
@@ -634,47 +631,30 @@ class TestUtils(SupersetTestCase):
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEqual(form_data, expected)
 
-    def test_convert_legacy_filters_into_adhoc_having(self):
-        form_data = {"having": "COUNT(1) = 1"}
+    def test_convert_legacy_filters_into_adhoc_present_and_empty(self):
+        form_data = {"adhoc_filters": [], "where": "a = 1"}
         expected = {
             "adhoc_filters": [
                 {
-                    "clause": "HAVING",
+                    "clause": "WHERE",
                     "expressionType": "SQL",
-                    "filterOptionName": "683f1c26466ab912f75a00842e0f2f7b",
-                    "sqlExpression": "COUNT(1) = 1",
+                    "filterOptionName": "46fb6d7891e23596e42ae38da94a57e0",
+                    "sqlExpression": "a = 1",
                 }
             ]
         }
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEqual(form_data, expected)
 
-    def test_convert_legacy_filters_into_adhoc_having_filters(self):
-        form_data = {"having_filters": [{"col": "COUNT(1)", "op": "==", "val": 1}]}
+    def test_convert_legacy_filters_into_adhoc_having(self):
+        form_data = {"having": "COUNT(1) = 1"}
         expected = {
             "adhoc_filters": [
                 {
                     "clause": "HAVING",
-                    "comparator": 1,
-                    "expressionType": "SIMPLE",
-                    "filterOptionName": "967d0fb409f6d9c7a6c03a46cf933c9c",
-                    "operator": "==",
-                    "subject": "COUNT(1)",
-                }
-            ]
-        }
-        convert_legacy_filters_into_adhoc(form_data)
-        self.assertEqual(form_data, expected)
-
-    def test_convert_legacy_filters_into_adhoc_present_and_empty(self):
-        form_data = {"adhoc_filters": [], "where": "a = 1"}
-        expected = {
-            "adhoc_filters": [
-                {
-                    "clause": "WHERE",
                     "expressionType": "SQL",
-                    "filterOptionName": "46fb6d7891e23596e42ae38da94a57e0",
-                    "sqlExpression": "a = 1",
+                    "filterOptionName": "683f1c26466ab912f75a00842e0f2f7b",
+                    "sqlExpression": "COUNT(1) = 1",
                 }
             ]
         }
@@ -688,7 +668,6 @@ class TestUtils(SupersetTestCase):
             ],
             "filters": [{"col": "a", "op": "in", "val": "someval"}],
             "having": "COUNT(1) = 1",
-            "having_filters": [{"col": "COUNT(1)", "op": "==", "val": 1}],
         }
         expected = {
             "adhoc_filters": [