You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@druid.apache.org by cw...@apache.org on 2020/10/31 02:03:04 UTC

[druid] branch master updated: Web console: improve type preservation in ingestion configs in the data loader (#10533)

This is an automated email from the ASF dual-hosted git repository.

cwylie pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new ee61a16  Web console: improve type preservation in ingestion configs in the data loader (#10533)
ee61a16 is described below

commit ee61a165e3142149595362a01211227f2ce3b07a
Author: Vadim Ogievetsky <va...@ogievetsky.com>
AuthorDate: Fri Oct 30 19:02:44 2020 -0700

    Web console: improve type preservation in ingestion configs in the data loader (#10533)
    
    * improve validation
    
    * move to druid modals
    
    * adjust specs
    
    * oneOf
    
    * move transform
    
    * segment grans
    
    * tidy up webpack
    
    * add Partitioning
    
    * break out druid models
    
    * tidy up
    
    * rename to Expression
    
    * move druid time
    
    * cleanup
    
    * update format
    
    * better detail
    
    * fix e2e test
    
    * remove forceGuaranteedRollup from e2e tests
    
    * clean up imports
    
    * shardSpec -> shard_spec
    
    * fix css
    
    * adjust snapshot
    
    * add granularity to segments view
    
    * rename to Time span
    
    * use OFFSET in segments view query
    
    * update doc links
    
    * simplify require
    
    * more oneOf
---
 .../e2e-tests/component/datasources/overview.ts    |    7 +-
 .../component/load-data/config/partition.ts        |    8 +-
 .../e2e-tests/component/load-data/data-loader.ts   |   14 +-
 web-console/e2e-tests/component/query/overview.ts  |    3 +-
 web-console/e2e-tests/reindexing.spec.ts           |    1 -
 web-console/e2e-tests/tutorial-batch.spec.ts       |    1 -
 web-console/lib/keywords.js                        |    1 +
 .../src/components/auto-form/auto-form.scss        |    4 -
 web-console/src/components/auto-form/auto-form.tsx |   46 +-
 .../__snapshots__/header-bar.spec.tsx.snap         |    2 +-
 web-console/src/components/index.ts                |    1 +
 .../src/components/json-input/json-input.tsx       |   15 +-
 .../suggestible-input/suggestible-input.tsx        |    1 +
 .../supervisor-statistics-table.tsx                |    2 +-
 web-console/src/console-application.scss           |    4 -
 web-console/src/console-application.tsx            |   10 -
 .../__snapshots__/compaction-dialog.spec.tsx.snap  |   36 +-
 .../compaction-dialog/compaction-dialog.scss       |    4 -
 .../compaction-dialog/compaction-dialog.tsx        |  258 +----
 ...coordinator-dynamic-config-dialog.spec.tsx.snap |    2 +-
 .../coordinator-dynamic-config-dialog.tsx          |    2 +-
 .../src/dialogs/doctor-dialog/doctor-checks.tsx    |    3 +-
 .../__snapshots__/lookup-edit-dialog.spec.tsx.snap |  122 ++-
 .../lookup-edit-dialog/lookup-edit-dialog.scss     |    4 -
 .../lookup-edit-dialog/lookup-edit-dialog.spec.tsx |  438 +--------
 .../lookup-edit-dialog/lookup-edit-dialog.tsx      |  502 +---------
 .../overload-dynamic-config-dialog.spec.tsx.snap   |    2 +-
 .../overlord-dynamic-config-dialog.tsx             |    2 +-
 .../__snapshots__/retention-dialog.spec.tsx.snap   |    2 +-
 .../src/dialogs/spec-dialog/spec-dialog.scss       |    4 -
 .../supervisor-table-action-dialog.tsx             |    2 +-
 .../task-table-action-dialog.tsx                   |    2 +-
 .../__snapshots__/ingestion-spec.spec.ts.snap      |    0
 .../compaction-config.tsx}                         |  119 +--
 .../compaction-status.spec.ts}                     |    6 +-
 .../compaction-status.ts}                          |    4 +-
 .../dimension-spec.spec.ts}                        |   29 +-
 web-console/src/druid-models/dimension-spec.ts     |   84 ++
 web-console/src/druid-models/filter.tsx            |  121 +++
 .../flatten-spec.spec.ts}                          |    2 +-
 .../flatten-spec.tsx}                              |   45 +-
 .../index.ts}                                      |   33 +-
 .../{utils => druid-models}/ingestion-spec.spec.ts |  106 +-
 .../src/{utils => druid-models}/ingestion-spec.tsx | 1021 +++-----------------
 web-console/src/druid-models/input-format.tsx      |  131 +++
 web-console/src/druid-models/input-source.tsx      |   89 ++
 web-console/src/druid-models/lookup-spec.spec.ts   |  453 +++++++++
 web-console/src/druid-models/lookup-spec.tsx       |  456 +++++++++
 .../metric-spec.spec.ts}                           |   32 +-
 web-console/src/druid-models/metric-spec.tsx       |  347 +++++++
 .../time.spec.ts}                                  |    2 +-
 .../{utils/druid-time.ts => druid-models/time.ts}  |    2 +-
 web-console/src/druid-models/timestamp-spec.tsx    |  157 +++
 web-console/src/druid-models/transform-spec.tsx    |  104 ++
 web-console/src/entry.scss                         |    8 +-
 web-console/src/links.ts                           |    2 +-
 web-console/src/utils/druid-query.spec.ts          |   16 +-
 web-console/src/utils/druid-type.ts                |  115 ---
 web-console/src/utils/general.spec.ts              |   28 +-
 web-console/src/utils/general.tsx                  |   18 +-
 web-console/src/utils/index.tsx                    |    3 +-
 web-console/src/utils/object-change.ts             |   11 +
 web-console/src/utils/query-manager.tsx            |    3 +
 web-console/src/utils/sampler.ts                   |   38 +-
 web-console/src/utils/utils.spec.ts                |  170 +---
 .../__snapshots__/datasource-view.spec.tsx.snap    |   14 +
 .../src/views/datasource-view/datasource-view.tsx  |   74 +-
 .../home-view/segments-card/segments-card.tsx      |    4 +-
 .../views/ingestion-view/ingestion-view.spec.tsx   |    1 -
 .../src/views/ingestion-view/ingestion-view.tsx    |   22 +-
 .../load-data-view/filter-table/filter-table.tsx   |    2 +-
 .../src/views/load-data-view/load-data-view.scss   |   13 +-
 .../src/views/load-data-view/load-data-view.tsx    |  279 +++---
 .../parse-data-table/parse-data-table.tsx          |    2 +-
 .../parse-time-table/parse-time-table.spec.tsx     |   11 +-
 .../parse-time-table/parse-time-table.tsx          |   49 +-
 .../load-data-view/schema-table/schema-table.tsx   |    6 +-
 .../transform-table/transform-table.tsx            |    2 +-
 .../src/views/lookups-view/lookups-view.tsx        |    6 +-
 .../views/query-view/column-tree/column-tree.tsx   |    6 +-
 .../views/query-view/query-output/query-output.tsx |    3 +-
 .../__snapshots__/segments-view.spec.tsx.snap      |   18 +
 .../src/views/segments-view/segments-view.tsx      |  190 ++--
 .../src/views/services-view/services-view.spec.tsx |    7 +-
 .../src/views/services-view/services-view.tsx      |   25 +-
 web-console/webpack.config.js                      |    2 +-
 86 files changed, 3042 insertions(+), 2954 deletions(-)

diff --git a/web-console/e2e-tests/component/datasources/overview.ts b/web-console/e2e-tests/component/datasources/overview.ts
index 5fb4bf4..f3f0c55 100644
--- a/web-console/e2e-tests/component/datasources/overview.ts
+++ b/web-console/e2e-tests/component/datasources/overview.ts
@@ -18,9 +18,7 @@
 
 import * as playwright from 'playwright-chromium';
 
-import { clickButton } from '../../util/playwright';
-import { getLabeledInput } from '../../util/playwright';
-import { setLabeledInput } from '../../util/playwright';
+import { clickButton, getLabeledInput, setLabeledInput } from '../../util/playwright';
 import { extractTable } from '../../util/table';
 import { readPartitionSpec } from '../load-data/config/partition';
 
@@ -36,10 +34,13 @@ enum DatasourceColumn {
   SEGMENT_LOAD_DROP,
   TOTAL_DATA_SIZE,
   SEGMENT_SIZE,
+  SEGMENT_GRANULARITY,
   TOTAL_ROWS,
   AVG_ROW_SIZE,
   REPLICATED_SIZE,
   COMPACTION,
+  PERCENT_COMPACTED,
+  LEFT_TO_BE_COMPACTED,
   RETENTION,
   ACTIONS,
 }
diff --git a/web-console/e2e-tests/component/load-data/config/partition.ts b/web-console/e2e-tests/component/load-data/config/partition.ts
index 04e9d0e..f811a53 100644
--- a/web-console/e2e-tests/component/load-data/config/partition.ts
+++ b/web-console/e2e-tests/component/load-data/config/partition.ts
@@ -18,9 +18,7 @@
 
 import * as playwright from 'playwright-chromium';
 
-import { selectSuggestibleInput } from '../../../util/playwright';
-import { getLabeledInput } from '../../../util/playwright';
-import { setLabeledInput } from '../../../util/playwright';
+import { getLabeledInput, selectSuggestibleInput, setLabeledInput } from '../../../util/playwright';
 
 /* tslint:disable max-classes-per-file */
 
@@ -159,18 +157,14 @@ export interface SingleDimPartitionsSpec extends SingleDimPartitionsSpecProps {}
  * Data loader partition step configuration.
  */
 export class PartitionConfig {
-  readonly forceGuaranteedRollupText: string;
-
   constructor(props: PartitionConfigProps) {
     Object.assign(this, props);
-    this.forceGuaranteedRollupText = this.forceGuaranteedRollup ? 'True' : 'False';
   }
 }
 
 interface PartitionConfigProps {
   readonly segmentGranularity: SegmentGranularity;
   readonly timeIntervals: string | null;
-  readonly forceGuaranteedRollup: boolean | null;
   readonly partitionsSpec: PartitionsSpec | null;
 }
 
diff --git a/web-console/e2e-tests/component/load-data/data-loader.ts b/web-console/e2e-tests/component/load-data/data-loader.ts
index a0b64ff..df16e71 100644
--- a/web-console/e2e-tests/component/load-data/data-loader.ts
+++ b/web-console/e2e-tests/component/load-data/data-loader.ts
@@ -18,10 +18,7 @@
 
 import * as playwright from 'playwright-chromium';
 
-import { clickButton } from '../../util/playwright';
-import { clickLabeledButton } from '../../util/playwright';
-import { setLabeledInput } from '../../util/playwright';
-import { setLabeledTextarea } from '../../util/playwright';
+import { clickButton, setLabeledInput, setLabeledTextarea } from '../../util/playwright';
 
 import { ConfigureSchemaConfig } from './config/configure-schema';
 import { PartitionConfig } from './config/partition';
@@ -128,13 +125,8 @@ export class DataLoader {
 
   private async applyPartitionConfig(partitionConfig: PartitionConfig) {
     await setLabeledInput(this.page, 'Segment granularity', partitionConfig.segmentGranularity);
-    if (partitionConfig.forceGuaranteedRollup) {
-      await clickLabeledButton(
-        this.page,
-        'Force guaranteed rollup',
-        partitionConfig.forceGuaranteedRollupText,
-      );
-      await setLabeledTextarea(this.page, 'Time intervals', partitionConfig.timeIntervals!);
+    if (partitionConfig.timeIntervals) {
+      await setLabeledTextarea(this.page, 'Time intervals', partitionConfig.timeIntervals);
     }
     if (partitionConfig.partitionsSpec != null) {
       await partitionConfig.partitionsSpec.apply(this.page);
diff --git a/web-console/e2e-tests/component/query/overview.ts b/web-console/e2e-tests/component/query/overview.ts
index 9e3c646..d3b8986 100644
--- a/web-console/e2e-tests/component/query/overview.ts
+++ b/web-console/e2e-tests/component/query/overview.ts
@@ -18,8 +18,7 @@
 
 import * as playwright from 'playwright-chromium';
 
-import { clickButton } from '../../util/playwright';
-import { setInput } from '../../util/playwright';
+import { clickButton, setInput } from '../../util/playwright';
 import { extractTable } from '../../util/table';
 
 /**
diff --git a/web-console/e2e-tests/reindexing.spec.ts b/web-console/e2e-tests/reindexing.spec.ts
index d3a02c1..ae45b73 100644
--- a/web-console/e2e-tests/reindexing.spec.ts
+++ b/web-console/e2e-tests/reindexing.spec.ts
@@ -68,7 +68,6 @@ describe('Reindexing from Druid', () => {
     const partitionConfig = new PartitionConfig({
       segmentGranularity: SegmentGranularity.DAY,
       timeIntervals: interval,
-      forceGuaranteedRollup: true,
       partitionsSpec: new SingleDimPartitionsSpec({
         partitionDimension: 'channel',
         targetRowsPerSegment: 10_000,
diff --git a/web-console/e2e-tests/tutorial-batch.spec.ts b/web-console/e2e-tests/tutorial-batch.spec.ts
index 1fad5a9..f4fa450 100644
--- a/web-console/e2e-tests/tutorial-batch.spec.ts
+++ b/web-console/e2e-tests/tutorial-batch.spec.ts
@@ -65,7 +65,6 @@ describe('Tutorial: Loading a file', () => {
     const partitionConfig = new PartitionConfig({
       segmentGranularity: SegmentGranularity.DAY,
       timeIntervals: null,
-      forceGuaranteedRollup: null,
       partitionsSpec: null,
     });
     const publishConfig = new PublishConfig({ datasourceName: datasourceName });
diff --git a/web-console/lib/keywords.js b/web-console/lib/keywords.js
index dfc2bd6..e476dfb 100644
--- a/web-console/lib/keywords.js
+++ b/web-console/lib/keywords.js
@@ -36,6 +36,7 @@ exports.SQL_KEYWORDS = [
   'ASC',
   'DESC',
   'LIMIT',
+  'OFFSET',
   'UNION ALL',
   'JOIN',
   'LEFT',
diff --git a/web-console/src/components/auto-form/auto-form.scss b/web-console/src/components/auto-form/auto-form.scss
index 898f9b1..b880b55 100644
--- a/web-console/src/components/auto-form/auto-form.scss
+++ b/web-console/src/components/auto-form/auto-form.scss
@@ -17,10 +17,6 @@
  */
 
 .auto-form {
-  .ace-solarized-dark {
-    background-color: #212e37;
-  }
-
   // Popover in info label
   label.bp3-label {
     position: relative;
diff --git a/web-console/src/components/auto-form/auto-form.tsx b/web-console/src/components/auto-form/auto-form.tsx
index ce26cad..6ae9e4b 100644
--- a/web-console/src/components/auto-form/auto-form.tsx
+++ b/web-console/src/components/auto-form/auto-form.tsx
@@ -19,7 +19,7 @@
 import { Button, ButtonGroup, FormGroup, Intent, NumericInput } from '@blueprintjs/core';
 import React from 'react';
 
-import { deepDelete, deepGet, deepSet } from '../../utils/object-change';
+import { deepDelete, deepGet, deepSet } from '../../utils';
 import { ArrayInput } from '../array-input/array-input';
 import { FormGroupWithInfo } from '../form-group-with-info/form-group-with-info';
 import { IntervalInput } from '../interval-input/interval-input';
@@ -55,6 +55,7 @@ export interface Field<M> {
   defined?: Functor<M, boolean>;
   required?: Functor<M, boolean>;
   adjustment?: (model: M) => M;
+  issueWithValue?: (value: any) => string | undefined;
 }
 
 export interface AutoFormProps<M> {
@@ -93,6 +94,48 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
     }
   }
 
+  static issueWithModel<M>(model: M | undefined, fields: readonly Field<M>[]): string | undefined {
+    if (typeof model === 'undefined') {
+      return `model is undefined`;
+    }
+
+    // Precompute which fields are defined because fields could be defined twice and only one should do the checking
+    const definedFields: Record<string, Field<M>> = {};
+    for (const field of fields) {
+      const fieldDefined = AutoForm.evaluateFunctor(field.defined, model, true);
+      if (fieldDefined) {
+        definedFields[field.name] = field;
+      }
+    }
+
+    for (const field of fields) {
+      const fieldValue = deepGet(model, field.name);
+      const fieldValueDefined = typeof fieldValue !== 'undefined';
+      const fieldThatIsDefined = definedFields[field.name];
+      if (fieldThatIsDefined) {
+        if (fieldThatIsDefined === field) {
+          const fieldRequired = AutoForm.evaluateFunctor(field.required, model, false);
+          if (fieldRequired) {
+            if (!fieldValueDefined) {
+              return `field ${field.name} is required`;
+            }
+          }
+
+          if (fieldValueDefined && field.issueWithValue) {
+            const valueIssue = field.issueWithValue(fieldValue);
+            if (valueIssue) return `field ${field.name} has issue ${valueIssue}`;
+          }
+        }
+      } else {
+        // The field is undefined
+        if (fieldValueDefined) {
+          return `field ${field.name} is defined but it should not be`;
+        }
+      }
+    }
+    return;
+  }
+
   constructor(props: AutoFormProps<T>) {
     super(props);
     this.state = {};
@@ -274,6 +317,7 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
         onChange={(v: any) => this.fieldChange(field, v)}
         placeholder={AutoForm.evaluateFunctor(field.placeholder, model, '')}
         height={field.height}
+        issueWithValue={field.issueWithValue}
       />
     );
   }
diff --git a/web-console/src/components/header-bar/__snapshots__/header-bar.spec.tsx.snap b/web-console/src/components/header-bar/__snapshots__/header-bar.spec.tsx.snap
index 788cabc..0aadb18 100644
--- a/web-console/src/components/header-bar/__snapshots__/header-bar.spec.tsx.snap
+++ b/web-console/src/components/header-bar/__snapshots__/header-bar.spec.tsx.snap
@@ -159,7 +159,7 @@ exports[`header bar matches snapshot 1`] = `
           />
           <Blueprint3.MenuItem
             disabled={false}
-            href="https://druid.apache.org/docs/0.19.0"
+            href="https://druid.apache.org/docs/0.20.0"
             icon="th"
             multiline={false}
             popoverProps={Object {}}
diff --git a/web-console/src/components/index.ts b/web-console/src/components/index.ts
index 7ebc13e..346a00e 100644
--- a/web-console/src/components/index.ts
+++ b/web-console/src/components/index.ts
@@ -41,3 +41,4 @@ export * from './table-cell/table-cell';
 export * from './table-column-selector/table-column-selector';
 export * from './timed-button/timed-button';
 export * from './view-control-bar/view-control-bar';
+export * from './form-json-selector/form-json-selector';
diff --git a/web-console/src/components/json-input/json-input.tsx b/web-console/src/components/json-input/json-input.tsx
index eba0620..ff08db2 100644
--- a/web-console/src/components/json-input/json-input.tsx
+++ b/web-console/src/components/json-input/json-input.tsx
@@ -44,7 +44,9 @@ export function extractRowColumnFromHjsonError(
 
 function stringifyJson(item: any): string {
   if (item != null) {
-    return JSON.stringify(item, null, 2);
+    const str = JSON.stringify(item, null, 2);
+    if (str === '{}') return '{\n\n}'; // Very special case for an empty object to make it more beautiful
+    return str;
   } else {
     return '';
   }
@@ -68,10 +70,11 @@ interface JsonInputProps {
   focus?: boolean;
   width?: string;
   height?: string;
+  issueWithValue?: (value: any) => string | undefined;
 }
 
 export const JsonInput = React.memo(function JsonInput(props: JsonInputProps) {
-  const { onChange, placeholder, focus, width, height, value } = props;
+  const { onChange, placeholder, focus, width, height, value, issueWithValue } = props;
   const [internalValue, setInternalValue] = useState<InternalValue>(() => ({
     value,
     stringified: stringifyJson(value),
@@ -102,6 +105,14 @@ export const JsonInput = React.memo(function JsonInput(props: JsonInputProps) {
             error = e;
           }
 
+          if (!error && issueWithValue) {
+            const issue = issueWithValue(value);
+            if (issue) {
+              value = undefined;
+              error = new Error(issue);
+            }
+          }
+
           setInternalValue({
             value,
             error,
diff --git a/web-console/src/components/suggestible-input/suggestible-input.tsx b/web-console/src/components/suggestible-input/suggestible-input.tsx
index c4e37fd..0b05917 100644
--- a/web-console/src/components/suggestible-input/suggestible-input.tsx
+++ b/web-console/src/components/suggestible-input/suggestible-input.tsx
@@ -83,6 +83,7 @@ export const SuggestibleInput = React.memo(function SuggestibleInput(props: Sugg
       rightElement={
         suggestions && (
           <Popover
+            boundary={'window'}
             content={
               <Menu>
                 {suggestions.map(suggestion => {
diff --git a/web-console/src/components/supervisor-statistics-table/supervisor-statistics-table.tsx b/web-console/src/components/supervisor-statistics-table/supervisor-statistics-table.tsx
index a370592..a8c5383 100644
--- a/web-console/src/components/supervisor-statistics-table/supervisor-statistics-table.tsx
+++ b/web-console/src/components/supervisor-statistics-table/supervisor-statistics-table.tsx
@@ -23,7 +23,7 @@ import ReactTable, { CellInfo, Column } from 'react-table';
 
 import { useQueryManager } from '../../hooks';
 import { UrlBaser } from '../../singletons/url-baser';
-import { deepGet } from '../../utils/object-change';
+import { deepGet } from '../../utils';
 import { Loader } from '../loader/loader';
 
 import './supervisor-statistics-table.scss';
diff --git a/web-console/src/console-application.scss b/web-console/src/console-application.scss
index d3015b7..7f6107e 100644
--- a/web-console/src/console-application.scss
+++ b/web-console/src/console-application.scss
@@ -46,8 +46,4 @@
     height: 22px;
     border-top: 2px solid #6d8ea9;
   }
-
-  .ace-solarized-dark {
-    background-color: rgba($dark-gray1, 0.5);
-  }
 }
diff --git a/web-console/src/console-application.tsx b/web-console/src/console-application.tsx
index c8b9001..c09100a 100644
--- a/web-console/src/console-application.tsx
+++ b/web-console/src/console-application.tsx
@@ -77,7 +77,6 @@ export class ConsoleApplication extends React.PureComponent<
   private datasource?: string;
   private onlyUnavailable?: boolean;
   private initQuery?: string;
-  private middleManager?: string;
 
   constructor(props: ConsoleApplicationProps, context: any) {
     super(props, context);
@@ -118,7 +117,6 @@ export class ConsoleApplication extends React.PureComponent<
       this.datasource = undefined;
       this.onlyUnavailable = undefined;
       this.initQuery = undefined;
-      this.middleManager = undefined;
     }, 50);
   }
 
@@ -156,12 +154,6 @@ export class ConsoleApplication extends React.PureComponent<
     this.resetInitialsWithDelay();
   };
 
-  private goToMiddleManager = (middleManager: string) => {
-    this.middleManager = middleManager;
-    window.location.hash = 'services';
-    this.resetInitialsWithDelay();
-  };
-
   private goToQuery = (initQuery: string) => {
     this.initQuery = initQuery;
     window.location.hash = 'query';
@@ -254,7 +246,6 @@ export class ConsoleApplication extends React.PureComponent<
         openDialog={this.openDialog}
         goToDatasource={this.goToDatasources}
         goToQuery={this.goToQuery}
-        goToMiddleManager={this.goToMiddleManager}
         goToLoadData={this.goToLoadData}
         capabilities={capabilities}
       />,
@@ -266,7 +257,6 @@ export class ConsoleApplication extends React.PureComponent<
     return this.wrapInViewContainer(
       'services',
       <ServicesView
-        middleManager={this.middleManager}
         goToQuery={this.goToQuery}
         goToTask={this.goToIngestionWithTaskGroupId}
         capabilities={capabilities}
diff --git a/web-console/src/dialogs/compaction-dialog/__snapshots__/compaction-dialog.spec.tsx.snap b/web-console/src/dialogs/compaction-dialog/__snapshots__/compaction-dialog.spec.tsx.snap
index 2cbd2be..e03e12a 100644
--- a/web-console/src/dialogs/compaction-dialog/__snapshots__/compaction-dialog.spec.tsx.snap
+++ b/web-console/src/dialogs/compaction-dialog/__snapshots__/compaction-dialog.spec.tsx.snap
@@ -90,6 +90,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (dynamic partit
             "label": "Target rows per segment",
             "name": "tuningConfig.partitionsSpec.targetRowsPerSegment",
             "type": "number",
+            "zeroMeansUndefined": true,
           },
           Object {
             "defined": [Function],
@@ -104,6 +105,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (dynamic partit
             "label": "Num shards",
             "name": "tuningConfig.partitionsSpec.numShards",
             "type": "number",
+            "zeroMeansUndefined": true,
           },
           Object {
             "defined": [Function],
@@ -112,6 +114,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (dynamic partit
             </p>,
             "label": "Partition dimensions",
             "name": "tuningConfig.partitionsSpec.partitionDimensions",
+            "placeholder": "(all dimensions)",
             "type": "string-array",
           },
           Object {
@@ -175,14 +178,14 @@ exports[`CompactionDialog matches snapshot with compactionConfig (dynamic partit
             "type": "number",
           },
           Object {
-            "defaultValue": 1,
+            "defaultValue": 10,
             "defined": [Function],
             "info": <React.Fragment>
               Maximum number of merge tasks which can be run at the same time.
             </React.Fragment>,
-            "label": "Max num merge tasks",
+            "label": "Total num merge tasks",
             "min": 1,
-            "name": "tuningConfig.maxNumMergeTasks",
+            "name": "tuningConfig.totalNumMergeTasks",
             "type": "number",
           },
           Object {
@@ -327,6 +330,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (hashed partiti
             "label": "Target rows per segment",
             "name": "tuningConfig.partitionsSpec.targetRowsPerSegment",
             "type": "number",
+            "zeroMeansUndefined": true,
           },
           Object {
             "defined": [Function],
@@ -341,6 +345,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (hashed partiti
             "label": "Num shards",
             "name": "tuningConfig.partitionsSpec.numShards",
             "type": "number",
+            "zeroMeansUndefined": true,
           },
           Object {
             "defined": [Function],
@@ -349,6 +354,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (hashed partiti
             </p>,
             "label": "Partition dimensions",
             "name": "tuningConfig.partitionsSpec.partitionDimensions",
+            "placeholder": "(all dimensions)",
             "type": "string-array",
           },
           Object {
@@ -412,14 +418,14 @@ exports[`CompactionDialog matches snapshot with compactionConfig (hashed partiti
             "type": "number",
           },
           Object {
-            "defaultValue": 1,
+            "defaultValue": 10,
             "defined": [Function],
             "info": <React.Fragment>
               Maximum number of merge tasks which can be run at the same time.
             </React.Fragment>,
-            "label": "Max num merge tasks",
+            "label": "Total num merge tasks",
             "min": 1,
-            "name": "tuningConfig.maxNumMergeTasks",
+            "name": "tuningConfig.totalNumMergeTasks",
             "type": "number",
           },
           Object {
@@ -564,6 +570,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (single_dim par
             "label": "Target rows per segment",
             "name": "tuningConfig.partitionsSpec.targetRowsPerSegment",
             "type": "number",
+            "zeroMeansUndefined": true,
           },
           Object {
             "defined": [Function],
@@ -578,6 +585,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (single_dim par
             "label": "Num shards",
             "name": "tuningConfig.partitionsSpec.numShards",
             "type": "number",
+            "zeroMeansUndefined": true,
           },
           Object {
             "defined": [Function],
@@ -586,6 +594,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (single_dim par
             </p>,
             "label": "Partition dimensions",
             "name": "tuningConfig.partitionsSpec.partitionDimensions",
+            "placeholder": "(all dimensions)",
             "type": "string-array",
           },
           Object {
@@ -649,14 +658,14 @@ exports[`CompactionDialog matches snapshot with compactionConfig (single_dim par
             "type": "number",
           },
           Object {
-            "defaultValue": 1,
+            "defaultValue": 10,
             "defined": [Function],
             "info": <React.Fragment>
               Maximum number of merge tasks which can be run at the same time.
             </React.Fragment>,
-            "label": "Max num merge tasks",
+            "label": "Total num merge tasks",
             "min": 1,
-            "name": "tuningConfig.maxNumMergeTasks",
+            "name": "tuningConfig.totalNumMergeTasks",
             "type": "number",
           },
           Object {
@@ -801,6 +810,7 @@ exports[`CompactionDialog matches snapshot without compactionConfig 1`] = `
             "label": "Target rows per segment",
             "name": "tuningConfig.partitionsSpec.targetRowsPerSegment",
             "type": "number",
+            "zeroMeansUndefined": true,
           },
           Object {
             "defined": [Function],
@@ -815,6 +825,7 @@ exports[`CompactionDialog matches snapshot without compactionConfig 1`] = `
             "label": "Num shards",
             "name": "tuningConfig.partitionsSpec.numShards",
             "type": "number",
+            "zeroMeansUndefined": true,
           },
           Object {
             "defined": [Function],
@@ -823,6 +834,7 @@ exports[`CompactionDialog matches snapshot without compactionConfig 1`] = `
             </p>,
             "label": "Partition dimensions",
             "name": "tuningConfig.partitionsSpec.partitionDimensions",
+            "placeholder": "(all dimensions)",
             "type": "string-array",
           },
           Object {
@@ -886,14 +898,14 @@ exports[`CompactionDialog matches snapshot without compactionConfig 1`] = `
             "type": "number",
           },
           Object {
-            "defaultValue": 1,
+            "defaultValue": 10,
             "defined": [Function],
             "info": <React.Fragment>
               Maximum number of merge tasks which can be run at the same time.
             </React.Fragment>,
-            "label": "Max num merge tasks",
+            "label": "Total num merge tasks",
             "min": 1,
-            "name": "tuningConfig.maxNumMergeTasks",
+            "name": "tuningConfig.totalNumMergeTasks",
             "type": "number",
           },
           Object {
diff --git a/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss b/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
index f5cd57f..00cea76 100644
--- a/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
+++ b/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
@@ -31,8 +31,4 @@
     flex: 1;
     overflow: auto;
   }
-
-  .ace-solarized-dark {
-    background-color: #232c35;
-  }
 }
diff --git a/web-console/src/dialogs/compaction-dialog/compaction-dialog.tsx b/web-console/src/dialogs/compaction-dialog/compaction-dialog.tsx
index d2b8055..8c96e97 100644
--- a/web-console/src/dialogs/compaction-dialog/compaction-dialog.tsx
+++ b/web-console/src/dialogs/compaction-dialog/compaction-dialog.tsx
@@ -16,254 +16,14 @@
  * limitations under the License.
  */
 
-import { Button, Classes, Code, Dialog, Intent } from '@blueprintjs/core';
+import { Button, Classes, Dialog, Intent } from '@blueprintjs/core';
 import React, { useState } from 'react';
 
-import { AutoForm, Field, JsonInput } from '../../components';
-import {
-  FormJsonSelector,
-  FormJsonTabs,
-} from '../../components/form-json-selector/form-json-selector';
-import { deepGet, deepSet } from '../../utils/object-change';
+import { AutoForm, FormJsonSelector, FormJsonTabs, JsonInput } from '../../components';
+import { COMPACTION_CONFIG_FIELDS, CompactionConfig } from '../../druid-models';
 
 import './compaction-dialog.scss';
 
-type CompactionConfig = Record<string, any>;
-
-const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
-  {
-    name: 'skipOffsetFromLatest',
-    type: 'string',
-    defaultValue: 'P1D',
-    suggestions: ['PT0H', 'PT1H', 'P1D', 'P3D'],
-    info: (
-      <p>
-        The offset for searching segments to be compacted. Strongly recommended to set for realtime
-        dataSources.
-      </p>
-    ),
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.type',
-    label: 'Partitioning type',
-    type: 'string',
-    suggestions: ['dynamic', 'hashed', 'single_dim'],
-    info: (
-      <p>
-        For perfect rollup, you should use either <Code>hashed</Code> (partitioning based on the
-        hash of dimensions in each row) or <Code>single_dim</Code> (based on ranges of a single
-        dimension). For best-effort rollup, you should use <Code>dynamic</Code>.
-      </p>
-    ),
-  },
-  // partitionsSpec type: dynamic
-  {
-    name: 'tuningConfig.partitionsSpec.maxRowsPerSegment',
-    label: 'Max rows per segment',
-    type: 'number',
-    defaultValue: 5000000,
-    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'dynamic',
-    info: <>Determines how many rows are in each segment.</>,
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.maxTotalRows',
-    label: 'Max total rows',
-    type: 'number',
-    defaultValue: 20000000,
-    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'dynamic',
-    info: <>Total number of rows in segments waiting for being pushed.</>,
-  },
-  // partitionsSpec type: hashed
-  {
-    name: 'tuningConfig.partitionsSpec.targetRowsPerSegment',
-    label: 'Target rows per segment',
-    type: 'number',
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.numShards'),
-    info: (
-      <>
-        <p>
-          If the segments generated are a sub-optimal size for the requested partition dimensions,
-          consider setting this field.
-        </p>
-        <p>
-          A target row count for each partition. Each partition will have a row count close to the
-          target assuming evenly distributed keys. Defaults to 5 million if numShards is null.
-        </p>
-      </>
-    ),
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.numShards',
-    label: 'Num shards',
-    type: 'number',
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
-    info: (
-      <>
-        <p>
-          If you know the optimal number of shards and want to speed up the time it takes for
-          compaction to run, set this field.
-        </p>
-        <p>
-          Directly specify the number of shards to create. If this is specified and 'intervals' is
-          specified in the granularitySpec, the index task can skip the determine
-          intervals/partitions pass through the data.
-        </p>
-      </>
-    ),
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.partitionDimensions',
-    label: 'Partition dimensions',
-    type: 'string-array',
-    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed',
-    info: <p>The dimensions to partition on. Leave blank to select all dimensions.</p>,
-  },
-  // partitionsSpec type: single_dim
-  {
-    name: 'tuningConfig.partitionsSpec.partitionDimension',
-    label: 'Partition dimension',
-    type: 'string',
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim',
-    required: true,
-    info: <p>The dimension to partition on.</p>,
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.targetRowsPerSegment',
-    label: 'Target rows per segment',
-    type: 'number',
-    zeroMeansUndefined: true,
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim' &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-    required: (t: CompactionConfig) =>
-      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment') &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-    info: (
-      <p>
-        Target number of rows to include in a partition, should be a number that targets segments of
-        500MB~1GB.
-      </p>
-    ),
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.maxRowsPerSegment',
-    label: 'Max rows per segment',
-    type: 'number',
-    zeroMeansUndefined: true,
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim' &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
-    required: (t: CompactionConfig) =>
-      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment') &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-    info: <p>Maximum number of rows to include in a partition.</p>,
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.assumeGrouped',
-    label: 'Assume grouped',
-    type: 'boolean',
-    defaultValue: false,
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim',
-    info: (
-      <p>
-        Assume that input data has already been grouped on time and dimensions. Ingestion will run
-        faster, but may choose sub-optimal partitions if this assumption is violated.
-      </p>
-    ),
-  },
-  {
-    name: 'tuningConfig.maxNumConcurrentSubTasks',
-    label: 'Max num concurrent sub tasks',
-    type: 'number',
-    defaultValue: 1,
-    min: 1,
-    info: (
-      <>
-        Maximum number of tasks which can be run at the same time. The supervisor task would spawn
-        worker tasks up to maxNumConcurrentSubTasks regardless of the available task slots. If this
-        value is set to 1, the supervisor task processes data ingestion on its own instead of
-        spawning worker tasks. If this value is set to too large, too many worker tasks can be
-        created which might block other ingestion.
-      </>
-    ),
-  },
-  {
-    name: 'inputSegmentSizeBytes',
-    type: 'number',
-    defaultValue: 419430400,
-    info: (
-      <p>
-        Maximum number of total segment bytes processed per compaction task. Since a time chunk must
-        be processed in its entirety, if the segments for a particular time chunk have a total size
-        in bytes greater than this parameter, compaction will not run for that time chunk. Because
-        each compaction task runs with a single thread, setting this value too far above 1–2GB will
-        result in compaction tasks taking an excessive amount of time.
-      </p>
-    ),
-  },
-  {
-    name: 'tuningConfig.maxNumMergeTasks',
-    label: 'Max num merge tasks',
-    type: 'number',
-    defaultValue: 1,
-    min: 1,
-    defined: (t: CompactionConfig) =>
-      ['hashed', 'single_dim'].includes(deepGet(t, 'tuningConfig.partitionsSpec.type')),
-    info: <>Maximum number of merge tasks which can be run at the same time.</>,
-  },
-  {
-    name: 'tuningConfig.splitHintSpec.maxInputSegmentBytesPerTask',
-    label: 'Max input segment bytes per task',
-    type: 'number',
-    defaultValue: 500000000,
-    min: 1000000,
-    adjustment: (t: CompactionConfig) => deepSet(t, 'tuningConfig.splitHintSpec.type', 'segments'),
-    info: (
-      <>
-        Maximum number of bytes of input segments to process in a single task. If a single segment
-        is larger than this number, it will be processed by itself in a single task (input segments
-        are never split across tasks).
-      </>
-    ),
-  },
-];
-
-function validCompactionConfig(compactionConfig: CompactionConfig): boolean {
-  const partitionsSpecType =
-    deepGet(compactionConfig, 'tuningConfig.partitionsSpec.type') || 'dynamic';
-  switch (partitionsSpecType) {
-    // case 'dynamic': // Nothing to check for dynamic
-    case 'hashed':
-      return !(
-        Boolean(deepGet(compactionConfig, 'tuningConfig.partitionsSpec.targetRowsPerSegment')) &&
-        Boolean(deepGet(compactionConfig, 'tuningConfig.partitionsSpec.numShards'))
-      );
-      break;
-    case 'single_dim':
-      if (!deepGet(compactionConfig, 'tuningConfig.partitionsSpec.partitionDimension')) {
-        return false;
-      }
-      const hasTargetRowsPerSegment = Boolean(
-        deepGet(compactionConfig, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
-      );
-      const hasMaxRowsPerSegment = Boolean(
-        deepGet(compactionConfig, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-      );
-      if (hasTargetRowsPerSegment === hasMaxRowsPerSegment) {
-        return false;
-      }
-      break;
-  }
-
-  return true;
-}
-
 export interface CompactionDialogProps {
   onClose: () => void;
   onSave: (compactionConfig: CompactionConfig) => void;
@@ -283,8 +43,9 @@ export const CompactionDialog = React.memo(function CompactionDialog(props: Comp
     },
   );
 
+  const issueWithCurrentConfig = AutoForm.issueWithModel(currentConfig, COMPACTION_CONFIG_FIELDS);
   function handleSubmit() {
-    if (!validCompactionConfig(currentConfig)) return;
+    if (issueWithCurrentConfig) return;
     onSave(currentConfig);
   }
 
@@ -305,7 +66,12 @@ export const CompactionDialog = React.memo(function CompactionDialog(props: Comp
             onChange={m => setCurrentConfig(m)}
           />
         ) : (
-          <JsonInput value={currentConfig} onChange={setCurrentConfig} height="100%" />
+          <JsonInput
+            value={currentConfig}
+            onChange={setCurrentConfig}
+            issueWithValue={value => AutoForm.issueWithModel(value, COMPACTION_CONFIG_FIELDS)}
+            height="100%"
+          />
         )}
       </div>
       <div className={Classes.DIALOG_FOOTER}>
@@ -316,7 +82,7 @@ export const CompactionDialog = React.memo(function CompactionDialog(props: Comp
             text="Submit"
             intent={Intent.PRIMARY}
             onClick={handleSubmit}
-            disabled={!validCompactionConfig(currentConfig)}
+            disabled={Boolean(issueWithCurrentConfig)}
           />
         </div>
       </div>
diff --git a/web-console/src/dialogs/coordinator-dynamic-config-dialog/__snapshots__/coordinator-dynamic-config-dialog.spec.tsx.snap b/web-console/src/dialogs/coordinator-dynamic-config-dialog/__snapshots__/coordinator-dynamic-config-dialog.spec.tsx.snap
index dc3cdac..4363077 100644
--- a/web-console/src/dialogs/coordinator-dynamic-config-dialog/__snapshots__/coordinator-dynamic-config-dialog.spec.tsx.snap
+++ b/web-console/src/dialogs/coordinator-dynamic-config-dialog/__snapshots__/coordinator-dynamic-config-dialog.spec.tsx.snap
@@ -11,7 +11,7 @@ exports[`coordinator dynamic config matches snapshot 1`] = `
     Edit the coordinator dynamic configuration on the fly. For more information please refer to the
      
     <Memo(ExternalLink)
-      href="https://druid.apache.org/docs/0.19.0/configuration/index.html#dynamic-configuration"
+      href="https://druid.apache.org/docs/0.20.0/configuration/index.html#dynamic-configuration"
     >
       documentation
     </Memo(ExternalLink)>
diff --git a/web-console/src/dialogs/coordinator-dynamic-config-dialog/coordinator-dynamic-config-dialog.tsx b/web-console/src/dialogs/coordinator-dynamic-config-dialog/coordinator-dynamic-config-dialog.tsx
index 8478ec6..f3f71b8 100644
--- a/web-console/src/dialogs/coordinator-dynamic-config-dialog/coordinator-dynamic-config-dialog.tsx
+++ b/web-console/src/dialogs/coordinator-dynamic-config-dialog/coordinator-dynamic-config-dialog.tsx
@@ -52,7 +52,7 @@ export const CoordinatorDynamicConfigDialog = React.memo(function CoordinatorDyn
     processQuery: async () => {
       try {
         const configResp = await axios.get('/druid/coordinator/v1/config');
-        setDynamicConfig(configResp.data);
+        setDynamicConfig(configResp.data || {});
       } catch (e) {
         AppToaster.show({
           icon: IconNames.ERROR,
diff --git a/web-console/src/dialogs/doctor-dialog/doctor-checks.tsx b/web-console/src/dialogs/doctor-dialog/doctor-checks.tsx
index 1dd5468..a5ecb85 100644
--- a/web-console/src/dialogs/doctor-dialog/doctor-checks.tsx
+++ b/web-console/src/dialogs/doctor-dialog/doctor-checks.tsx
@@ -18,8 +18,7 @@
 
 import axios from 'axios';
 
-import { pluralIfNeeded, queryDruidSql } from '../../utils';
-import { deepGet } from '../../utils/object-change';
+import { deepGet, pluralIfNeeded, queryDruidSql } from '../../utils';
 import { postToSampler } from '../../utils/sampler';
 
 export interface CheckControls {
diff --git a/web-console/src/dialogs/lookup-edit-dialog/__snapshots__/lookup-edit-dialog.spec.tsx.snap b/web-console/src/dialogs/lookup-edit-dialog/__snapshots__/lookup-edit-dialog.spec.tsx.snap
index 5349950..7b7dabc 100644
--- a/web-console/src/dialogs/lookup-edit-dialog/__snapshots__/lookup-edit-dialog.spec.tsx.snap
+++ b/web-console/src/dialogs/lookup-edit-dialog/__snapshots__/lookup-edit-dialog.spec.tsx.snap
@@ -16,6 +16,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     >
       <Blueprint3.InputGroup
         disabled={false}
+        intent="none"
         onChange={[Function]}
         placeholder="Enter the lookup name"
         value="test"
@@ -68,6 +69,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
           Object {
             "adjustment": [Function],
             "name": "type",
+            "required": true,
             "suggestions": Array [
               "map",
               "cachedNamespace",
@@ -77,7 +79,9 @@ exports[`LookupEditDialog matches snapshot 1`] = `
           Object {
             "defined": [Function],
             "height": "60vh",
+            "issueWithValue": [Function],
             "name": "map",
+            "required": true,
             "type": "json",
           },
           Object {
@@ -85,6 +89,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "label": "Globally cached lookup type",
             "name": "extractionNamespace.type",
             "placeholder": "uri",
+            "required": true,
             "suggestions": Array [
               "uri",
               "jdbc",
@@ -97,35 +102,81 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "label": "URI prefix",
             "name": "extractionNamespace.uriPrefix",
             "placeholder": "s3://bucket/some/key/prefix/",
+            "required": [Function],
             "type": "string",
           },
           Object {
             "defined": [Function],
-            "info": "Optional regex for matching the file name under uriPrefix. Only used if uriPrefix is used",
+            "info": <React.Fragment>
+              <p>
+                URI for the file of interest, specified as a file, hdfs, or s3 path
+              </p>
+              <p>
+                The URI prefix option is strictly better than URI and should be used instead
+              </p>
+            </React.Fragment>,
+            "label": "URI (deprecated)",
+            "name": "extractionNamespace.uri",
+            "placeholder": "s3://bucket/some/key/prefix/lookups-01.gz",
+            "required": [Function],
+            "type": "string",
+          },
+          Object {
+            "defaultValue": ".*",
+            "defined": [Function],
+            "info": "Optional regex for matching the file name under uriPrefix.",
             "label": "File regex",
             "name": "extractionNamespace.fileRegex",
-            "placeholder": "(optional)",
             "type": "string",
           },
           Object {
-            "defaultValue": "csv",
             "defined": [Function],
-            "label": "Format",
+            "info": <React.Fragment>
+              <p>
+                The format of the data in the lookup files.
+              </p>
+              <p>
+                The 
+                <Unknown>
+                  simpleJson
+                </Unknown>
+                 lookupParseSpec does not take any parameters. It is simply a line delimited JSON file where the field is the key, and the field's value is the value.
+              </p>
+            </React.Fragment>,
+            "label": "Parse format",
             "name": "extractionNamespace.namespaceParseSpec.format",
+            "required": true,
             "suggestions": Array [
               "csv",
               "tsv",
-              "customJson",
               "simpleJson",
+              "customJson",
             ],
             "type": "string",
           },
           Object {
+            "defaultValue": 0,
+            "defined": [Function],
+            "info": "Number of header rows to be skipped. The default number of header rows to be skipped is 0.",
+            "label": "Skip header rows",
+            "name": "extractionNamespace.namespaceParseSpec.skipHeaderRows",
+            "type": "number",
+          },
+          Object {
+            "defaultValue": false,
+            "defined": [Function],
+            "info": "A flag to indicate that column information can be extracted from the input files' header row",
+            "label": "Has header row",
+            "name": "extractionNamespace.namespaceParseSpec.hasHeaderRow",
+            "type": "boolean",
+          },
+          Object {
             "defined": [Function],
             "info": "The list of columns in the csv file",
             "label": "Columns",
             "name": "extractionNamespace.namespaceParseSpec.columns",
             "placeholder": "[\\"key\\", \\"value\\"]",
+            "required": [Function],
             "type": "string-array",
           },
           Object {
@@ -133,7 +184,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "info": "The name of the column containing the key",
             "label": "Key column",
             "name": "extractionNamespace.namespaceParseSpec.keyColumn",
-            "placeholder": "Key",
+            "placeholder": "(optional - defaults to the first column)",
             "type": "string",
           },
           Object {
@@ -141,26 +192,10 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "info": "The name of the column containing the value",
             "label": "Value column",
             "name": "extractionNamespace.namespaceParseSpec.valueColumn",
-            "placeholder": "Value",
+            "placeholder": "(optional - defaults to the second column)",
             "type": "string",
           },
           Object {
-            "defaultValue": false,
-            "defined": [Function],
-            "info": "A flag to indicate that column information can be extracted from the input files' header row",
-            "label": "Has header row",
-            "name": "extractionNamespace.namespaceParseSpec.hasHeaderRow",
-            "type": "boolean",
-          },
-          Object {
-            "defined": [Function],
-            "info": "Number of header rows to be skipped. The default number of header rows to be skipped is 0.",
-            "label": "Skip header rows",
-            "name": "extractionNamespace.namespaceParseSpec.skipHeaderRows",
-            "placeholder": "(optional)",
-            "type": "number",
-          },
-          Object {
             "defined": [Function],
             "label": "Delimiter",
             "name": "extractionNamespace.namespaceParseSpec.delimiter",
@@ -179,6 +214,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "label": "Key field name",
             "name": "extractionNamespace.namespaceParseSpec.keyFieldName",
             "placeholder": "key",
+            "required": true,
             "type": "string",
           },
           Object {
@@ -186,6 +222,15 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "label": "Value field name",
             "name": "extractionNamespace.namespaceParseSpec.valueFieldName",
             "placeholder": "value",
+            "required": true,
+            "type": "string",
+          },
+          Object {
+            "defaultValue": "0",
+            "defined": [Function],
+            "info": "Period between polling for updates",
+            "label": "Poll period",
+            "name": "extractionNamespace.pollPeriod",
             "type": "string",
           },
           Object {
@@ -205,20 +250,15 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "label": "Namespace",
             "name": "extractionNamespace.namespace",
             "placeholder": "some_lookup",
+            "required": true,
             "type": "string",
           },
           Object {
             "defined": [Function],
             "info": "Defines the connectURI value on the The connector config to used",
-            "label": "CreateTables",
-            "name": "extractionNamespace.connectorConfig.createTables",
-            "type": "boolean",
-          },
-          Object {
-            "defined": [Function],
-            "info": "Defines the connectURI value on the The connector config to used",
             "label": "Connect URI",
             "name": "extractionNamespace.connectorConfig.connectURI",
+            "required": true,
             "type": "string",
           },
           Object {
@@ -237,6 +277,13 @@ exports[`LookupEditDialog matches snapshot 1`] = `
           },
           Object {
             "defined": [Function],
+            "info": "Should tables be created",
+            "label": "Create tables",
+            "name": "extractionNamespace.connectorConfig.createTables",
+            "type": "boolean",
+          },
+          Object {
+            "defined": [Function],
             "info": <React.Fragment>
               <p>
                 The table which contains the key value pairs. This will become the table value in the SQL query:
@@ -252,6 +299,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "label": "Table",
             "name": "extractionNamespace.table",
             "placeholder": "some_lookup_table",
+            "required": true,
             "type": "string",
           },
           Object {
@@ -271,6 +319,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "label": "Key column",
             "name": "extractionNamespace.keyColumn",
             "placeholder": "my_key_value",
+            "required": true,
             "type": "string",
           },
           Object {
@@ -290,6 +339,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
             "label": "Value column",
             "name": "extractionNamespace.valueColumn",
             "placeholder": "my_column_value",
+            "required": true,
             "type": "string",
           },
           Object {
@@ -325,25 +375,17 @@ exports[`LookupEditDialog matches snapshot 1`] = `
                 ? FROM namespace.table WHERE filter
               </p>
             </React.Fragment>,
-            "label": "TsColumn",
+            "label": "Timestamp column",
             "name": "extractionNamespace.tsColumn",
             "placeholder": "(optional)",
             "type": "string",
           },
           Object {
-            "defined": [Function],
-            "info": "Period between polling for updates",
-            "label": "Poll period",
-            "name": "extractionNamespace.pollPeriod",
-            "placeholder": "(optional)",
-            "type": "string",
-          },
-          Object {
+            "defaultValue": 0,
             "defined": [Function],
             "info": "How long to wait (in ms) for the first run of the cache to populate. 0 indicates to not wait",
             "label": "First cache timeout",
             "name": "firstCacheTimeout",
-            "placeholder": "(optional)",
             "type": "number",
           },
           Object {
diff --git a/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.scss b/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.scss
index e42914f..ce69aae 100644
--- a/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.scss
+++ b/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.scss
@@ -29,10 +29,6 @@
     overflow: auto;
   }
 
-  .ace-solarized-dark {
-    background-color: #232c35;
-  }
-
   .ace_gutter-layer {
     background-color: #27313c;
   }
diff --git a/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.spec.tsx b/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.spec.tsx
index f9eebd8..432d53d 100644
--- a/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.spec.tsx
+++ b/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.spec.tsx
@@ -19,7 +19,7 @@
 import { shallow } from 'enzyme';
 import React from 'react';
 
-import { isLookupSubmitDisabled, LookupEditDialog } from './lookup-edit-dialog';
+import { LookupEditDialog } from './lookup-edit-dialog';
 
 describe('LookupEditDialog', () => {
   it('matches snapshot', () => {
@@ -40,439 +40,3 @@ describe('LookupEditDialog', () => {
     expect(lookupEditDialog).toMatchSnapshot();
   });
 });
-
-describe('Type Map Should be disabled', () => {
-  it('Missing LookupName', () => {
-    expect(isLookupSubmitDisabled(undefined, 'v1', '__default', { type: '' })).toBe(true);
-  });
-
-  it('Empty version', () => {
-    expect(isLookupSubmitDisabled('lookup', '', '__default', { type: '' })).toBe(true);
-  });
-
-  it('Missing version', () => {
-    expect(isLookupSubmitDisabled('lookup', undefined, '__default', { type: '' })).toBe(true);
-  });
-
-  it('Empty tier', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '', { type: '' })).toBe(true);
-  });
-
-  it('Missing tier', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', undefined, { type: '' })).toBe(true);
-  });
-
-  it('Missing spec', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', {})).toBe(true);
-  });
-
-  it('Type undefined', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', { type: undefined })).toBe(true);
-  });
-
-  it('Lookup of type map with no map', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', { type: 'map' })).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with no extractionNamespace', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', { type: 'cachedNamespace' })).toBe(
-      true,
-    );
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type uri, format csv, no namespaceParseSpec', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type uri, format csv, no columns and skipHeaderRows', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          namespaceParseSpec: {
-            format: 'csv',
-          },
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type uri, format tsv, no columns', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          namespaceParseSpec: {
-            format: 'tsv',
-            skipHeaderRows: 0,
-          },
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type customJson, format tsv, no keyFieldName', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          namespaceParseSpec: {
-            format: 'customJson',
-            valueFieldName: 'value',
-          },
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type customJson, format customJson, no valueFieldName', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          namespaceParseSpec: {
-            format: 'customJson',
-            keyFieldName: 'key',
-          },
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-});
-
-describe('Type cachedNamespace should be disabled', () => {
-  it('No extractionNamespace', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', { type: 'cachedNamespace' })).toBe(
-      true,
-    );
-  });
-
-  describe('ExtractionNamespace type URI', () => {
-    it('Format csv, no namespaceParseSpec', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('Format csv, no columns and skipHeaderRows', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'csv',
-            },
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('Format tsv, no columns', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'tsv',
-              skipHeaderRows: 0,
-            },
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('Format tsv, no keyFieldName', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'customJson',
-              valueFieldName: 'value',
-            },
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('Format customJson, no valueFieldName', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'customJson',
-              keyFieldName: 'key',
-            },
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-  });
-
-  describe('ExtractionNamespace type JDBC', () => {
-    it('No namespace', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: undefined,
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: 'some_lookup_table',
-            keyColumn: 'the_old_dim_value',
-            valueColumn: 'the_new_dim_value',
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('No connectorConfig', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'some_lookup',
-            connectorConfig: undefined,
-            table: 'some_lookup_table',
-            keyColumn: 'the_old_dim_value',
-            valueColumn: 'the_new_dim_value',
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('No table', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'some_lookup',
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: undefined,
-            keyColumn: 'the_old_dim_value',
-            valueColumn: 'the_new_dim_value',
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('No keyColumn', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'some_lookup',
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: 'some_lookup_table',
-            keyColumn: undefined,
-            valueColumn: 'the_new_dim_value',
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('No keyColumn', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'some_lookup',
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: 'some_lookup_table',
-            keyColumn: 'the_old_dim_value',
-            valueColumn: undefined,
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-  });
-});
-
-describe('Type Map Should be enabled', () => {
-  it('Has type and has Map', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', { type: 'map', map: { a: 'b' } }),
-    ).toBe(false);
-  });
-});
-
-describe('Type cachedNamespace Should be enabled', () => {
-  describe('ExtractionNamespace type URI', () => {
-    it('Format csv with columns', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'csv',
-              columns: ['key', 'value'],
-            },
-          },
-        }),
-      ).toBe(false);
-    });
-
-    it('Format csv with skipHeaderRows', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'csv',
-              skipHeaderRows: 1,
-            },
-          },
-        }),
-      ).toBe(false);
-    });
-
-    it('Format tsv, only columns', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'tsv',
-              columns: ['key', 'value'],
-            },
-          },
-        }),
-      ).toBe(false);
-    });
-
-    it('Format tsv, keyFieldName and valueFieldName', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'customJson',
-              valueFieldName: 'value',
-              keyFieldName: 'value',
-            },
-          },
-        }),
-      ).toBe(false);
-    });
-  });
-
-  describe('ExtractionNamespace type JDBC', () => {
-    it('No namespace', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'lookup',
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: 'some_lookup_table',
-            keyColumn: 'the_old_dim_value',
-            valueColumn: 'the_new_dim_value',
-          },
-        }),
-      ).toBe(false);
-    });
-  });
-});
diff --git a/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.tsx b/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.tsx
index 6c7061e..11b2be8 100644
--- a/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.tsx
+++ b/web-console/src/dialogs/lookup-edit-dialog/lookup-edit-dialog.tsx
@@ -27,56 +27,12 @@ import {
 } from '@blueprintjs/core';
 import React, { useState } from 'react';
 
-import { AutoForm, Field, JsonInput } from '../../components';
-import {
-  FormJsonSelector,
-  FormJsonTabs,
-} from '../../components/form-json-selector/form-json-selector';
+import { AutoForm, JsonInput } from '../../components';
+import { FormJsonSelector, FormJsonTabs } from '../../components';
+import { isLookupInvalid, LOOKUP_FIELDS, LookupSpec } from '../../druid-models';
 
 import './lookup-edit-dialog.scss';
 
-export interface ExtractionNamespaceSpec {
-  type?: string;
-  uri?: string;
-  uriPrefix?: string;
-  fileRegex?: string;
-  namespaceParseSpec?: NamespaceParseSpec;
-  namespace?: string;
-  connectorConfig?: {
-    createTables: boolean;
-    connectURI: string;
-    user: string;
-    password: string;
-  };
-  table?: string;
-  keyColumn?: string;
-  valueColumn?: string;
-  filter?: any;
-  tsColumn?: string;
-  pollPeriod?: number | string;
-}
-
-export interface NamespaceParseSpec {
-  format: string;
-  columns?: string[];
-  keyColumn?: string;
-  valueColumn?: string;
-  hasHeaderRow?: boolean;
-  skipHeaderRows?: number;
-  keyFieldName?: string;
-  valueFieldName?: string;
-  delimiter?: string;
-  listDelimiter?: string;
-}
-
-export interface LookupSpec {
-  type?: string;
-  map?: {};
-  extractionNamespace?: ExtractionNamespaceSpec;
-  firstCacheTimeout?: number;
-  injective?: boolean;
-}
-
 export interface LookupEditDialogProps {
   onClose: () => void;
   onSubmit: (updateLookupVersion: boolean) => void;
@@ -89,455 +45,6 @@ export interface LookupEditDialogProps {
   allLookupTiers: string[];
 }
 
-export function isLookupSubmitDisabled(
-  lookupName: string | undefined,
-  lookupVersion: string | undefined,
-  lookupTier: string | undefined,
-  lookupSpec: LookupSpec | undefined,
-) {
-  let disableSubmit =
-    !lookupName ||
-    !lookupVersion ||
-    !lookupTier ||
-    !lookupSpec ||
-    !lookupSpec.type ||
-    (lookupSpec.type === 'map' && !lookupSpec.map) ||
-    (lookupSpec.type === 'cachedNamespace' && !lookupSpec.extractionNamespace);
-
-  if (
-    !disableSubmit &&
-    lookupSpec &&
-    lookupSpec.type === 'cachedNamespace' &&
-    lookupSpec.extractionNamespace
-  ) {
-    switch (lookupSpec.extractionNamespace.type) {
-      case 'uri':
-        const namespaceParseSpec = lookupSpec.extractionNamespace.namespaceParseSpec;
-        disableSubmit = !namespaceParseSpec;
-        if (!namespaceParseSpec) break;
-        switch (namespaceParseSpec.format) {
-          case 'csv':
-            disableSubmit = !namespaceParseSpec.columns && !namespaceParseSpec.skipHeaderRows;
-            break;
-          case 'tsv':
-            disableSubmit = !namespaceParseSpec.columns;
-            break;
-          case 'customJson':
-            disableSubmit = !namespaceParseSpec.keyFieldName || !namespaceParseSpec.valueFieldName;
-            break;
-        }
-        break;
-      case 'jdbc':
-        const extractionNamespace = lookupSpec.extractionNamespace;
-        disableSubmit =
-          !extractionNamespace.namespace ||
-          !extractionNamespace.connectorConfig ||
-          !extractionNamespace.table ||
-          !extractionNamespace.keyColumn ||
-          !extractionNamespace.valueColumn;
-        break;
-    }
-  }
-  return disableSubmit;
-}
-
-const LOOKUP_FIELDS: Field<LookupSpec>[] = [
-  {
-    name: 'type',
-    type: 'string',
-    suggestions: ['map', 'cachedNamespace'],
-    adjustment: (model: LookupSpec) => {
-      if (model.type === 'map' && model.extractionNamespace && model.extractionNamespace.type) {
-        return model;
-      }
-      model.extractionNamespace = { type: 'uri', namespaceParseSpec: { format: 'csv' } };
-      return model;
-    },
-  },
-  {
-    name: 'map',
-    type: 'json',
-    height: '60vh',
-    defined: (model: LookupSpec) => model.type === 'map',
-  },
-  {
-    name: 'extractionNamespace.type',
-    type: 'string',
-    label: 'Globally cached lookup type',
-    placeholder: 'uri',
-    suggestions: ['uri', 'jdbc'],
-    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
-  },
-  {
-    name: 'extractionNamespace.uriPrefix',
-    type: 'string',
-    label: 'URI prefix',
-    info:
-      'A URI which specifies a directory (or other searchable resource) in which to search for files',
-    placeholder: 's3://bucket/some/key/prefix/',
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'uri',
-  },
-  {
-    name: 'extractionNamespace.fileRegex',
-    type: 'string',
-    label: 'File regex',
-    placeholder: '(optional)',
-    info:
-      'Optional regex for matching the file name under uriPrefix. Only used if uriPrefix is used',
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'uri',
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.format',
-    type: 'string',
-    label: 'Format',
-    defaultValue: 'csv',
-    suggestions: ['csv', 'tsv', 'customJson', 'simpleJson'],
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri',
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.columns',
-    type: 'string-array',
-    label: 'Columns',
-    placeholder: `["key", "value"]`,
-    info: 'The list of columns in the csv file',
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
-            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.keyColumn',
-    type: 'string',
-    label: 'Key column',
-    placeholder: 'Key',
-    info: 'The name of the column containing the key',
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
-            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.valueColumn',
-    type: 'string',
-    label: 'Value column',
-    placeholder: 'Value',
-    info: 'The name of the column containing the value',
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          !!model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
-            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.hasHeaderRow',
-    type: 'boolean',
-    label: 'Has header row',
-    defaultValue: false,
-    info: `A flag to indicate that column information can be extracted from the input files' header row`,
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          !!model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
-            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.skipHeaderRows',
-    type: 'number',
-    label: 'Skip header rows',
-    placeholder: '(optional)',
-    info: `Number of header rows to be skipped. The default number of header rows to be skipped is 0.`,
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          !!model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
-            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.delimiter',
-    type: 'string',
-    label: 'Delimiter',
-    placeholder: `(optional)`,
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          !!model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          model.extractionNamespace.namespaceParseSpec.format === 'tsv',
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.listDelimiter',
-    type: 'string',
-    label: 'List delimiter',
-    placeholder: `(optional)`,
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          !!model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          model.extractionNamespace.namespaceParseSpec.format === 'tsv',
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.keyFieldName',
-    type: 'string',
-    label: 'Key field name',
-    placeholder: `key`,
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          !!model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          model.extractionNamespace.namespaceParseSpec.format === 'customJson',
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespaceParseSpec.valueFieldName',
-    type: 'string',
-    label: 'Value field name',
-    placeholder: `value`,
-    defined: (model: LookupSpec) =>
-      Boolean(
-        model.type === 'cachedNamespace' &&
-          !!model.extractionNamespace &&
-          model.extractionNamespace.type === 'uri' &&
-          model.extractionNamespace.namespaceParseSpec &&
-          model.extractionNamespace.namespaceParseSpec.format === 'customJson',
-      ),
-  },
-  {
-    name: 'extractionNamespace.namespace',
-    type: 'string',
-    label: 'Namespace',
-    placeholder: 'some_lookup',
-    info: (
-      <>
-        <p>The namespace value in the SQL query:</p>
-        <p>
-          SELECT keyColumn, valueColumn, tsColumn? FROM <strong>namespace</strong>.table WHERE
-          filter
-        </p>
-      </>
-    ),
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.connectorConfig.createTables',
-    type: 'boolean',
-    label: 'CreateTables',
-    info: 'Defines the connectURI value on the The connector config to used',
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.connectorConfig.connectURI',
-    type: 'string',
-    label: 'Connect URI',
-    info: 'Defines the connectURI value on the The connector config to used',
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.connectorConfig.user',
-    type: 'string',
-    label: 'User',
-    info: 'Defines the user to be used by the connector config',
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.connectorConfig.password',
-    type: 'string',
-    label: 'Password',
-    info: 'Defines the password to be used by the connector config',
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.table',
-    type: 'string',
-    label: 'Table',
-    placeholder: 'some_lookup_table',
-    info: (
-      <>
-        <p>
-          The table which contains the key value pairs. This will become the table value in the SQL
-          query:
-        </p>
-        <p>
-          SELECT keyColumn, valueColumn, tsColumn? FROM namespace.<strong>table</strong> WHERE
-          filter
-        </p>
-      </>
-    ),
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.keyColumn',
-    type: 'string',
-    label: 'Key column',
-    placeholder: 'my_key_value',
-    info: (
-      <>
-        <p>
-          The column in the table which contains the keys. This will become the keyColumn value in
-          the SQL query:
-        </p>
-        <p>
-          SELECT <strong>keyColumn</strong>, valueColumn, tsColumn? FROM namespace.table WHERE
-          filter
-        </p>
-      </>
-    ),
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.valueColumn',
-    type: 'string',
-    label: 'Value column',
-    placeholder: 'my_column_value',
-    info: (
-      <>
-        <p>
-          The column in table which contains the values. This will become the valueColumn value in
-          the SQL query:
-        </p>
-        <p>
-          SELECT keyColumn, <strong>valueColumn</strong>, tsColumn? FROM namespace.table WHERE
-          filter
-        </p>
-      </>
-    ),
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.filter',
-    type: 'string',
-    label: 'Filter',
-    placeholder: '(optional)',
-    info: (
-      <>
-        <p>
-          The filter to be used when selecting lookups, this is used to create a where clause on
-          lookup population. This will become the expression filter in the SQL query:
-        </p>
-        <p>
-          SELECT keyColumn, valueColumn, tsColumn? FROM namespace.table WHERE{' '}
-          <strong>filter</strong>
-        </p>
-      </>
-    ),
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.tsColumn',
-    type: 'string',
-    label: 'TsColumn',
-    placeholder: '(optional)',
-    info: (
-      <>
-        <p>
-          The column in table which contains when the key was updated. This will become the Value in
-          the SQL query:
-        </p>
-        <p>
-          SELECT keyColumn, valueColumn, <strong>tsColumn</strong>? FROM namespace.table WHERE
-          filter
-        </p>
-      </>
-    ),
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'jdbc',
-  },
-  {
-    name: 'extractionNamespace.pollPeriod',
-    type: 'string',
-    label: 'Poll period',
-    placeholder: '(optional)',
-    info: `Period between polling for updates`,
-    defined: (model: LookupSpec) =>
-      model.type === 'cachedNamespace' &&
-      !!model.extractionNamespace &&
-      model.extractionNamespace.type === 'uri',
-  },
-  {
-    name: 'firstCacheTimeout',
-    type: 'number',
-    label: 'First cache timeout',
-    placeholder: '(optional)',
-    info: `How long to wait (in ms) for the first run of the cache to populate. 0 indicates to not wait`,
-    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
-  },
-  {
-    name: 'injective',
-    type: 'boolean',
-    defaultValue: false,
-    info: `If the underlying map is injective (keys and values are unique) then optimizations can occur internally by setting this to true`,
-    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
-  },
-];
-
 export const LookupEditDialog = React.memo(function LookupEditDialog(props: LookupEditDialogProps) {
   const {
     onClose,
@@ -565,6 +72,7 @@ export const LookupEditDialog = React.memo(function LookupEditDialog(props: Look
           <InputGroup
             value={lookupName}
             onChange={(e: any) => onChange('name', e.target.value)}
+            intent={lookupName ? Intent.NONE : Intent.PRIMARY}
             disabled={isEdit}
             placeholder="Enter the lookup name"
           />
@@ -631,7 +139,7 @@ export const LookupEditDialog = React.memo(function LookupEditDialog(props: Look
             onClick={() => {
               onSubmit(updateVersionOnSubmit && isEdit);
             }}
-            disabled={isLookupSubmitDisabled(lookupName, lookupVersion, lookupTier, lookupSpec)}
+            disabled={isLookupInvalid(lookupName, lookupVersion, lookupTier, lookupSpec)}
           />
         </div>
       </div>
diff --git a/web-console/src/dialogs/overlord-dynamic-config-dialog/__snapshots__/overload-dynamic-config-dialog.spec.tsx.snap b/web-console/src/dialogs/overlord-dynamic-config-dialog/__snapshots__/overload-dynamic-config-dialog.spec.tsx.snap
index a5e42b2..a4a8926 100644
--- a/web-console/src/dialogs/overlord-dynamic-config-dialog/__snapshots__/overload-dynamic-config-dialog.spec.tsx.snap
+++ b/web-console/src/dialogs/overlord-dynamic-config-dialog/__snapshots__/overload-dynamic-config-dialog.spec.tsx.snap
@@ -11,7 +11,7 @@ exports[`overload dynamic config matches snapshot 1`] = `
     Edit the overlord dynamic configuration on the fly. For more information please refer to the
      
     <Memo(ExternalLink)
-      href="https://druid.apache.org/docs/0.19.0/configuration/index.html#overlord-dynamic-configuration"
+      href="https://druid.apache.org/docs/0.20.0/configuration/index.html#overlord-dynamic-configuration"
     >
       documentation
     </Memo(ExternalLink)>
diff --git a/web-console/src/dialogs/overlord-dynamic-config-dialog/overlord-dynamic-config-dialog.tsx b/web-console/src/dialogs/overlord-dynamic-config-dialog/overlord-dynamic-config-dialog.tsx
index 64cea82..57ced4b 100644
--- a/web-console/src/dialogs/overlord-dynamic-config-dialog/overlord-dynamic-config-dialog.tsx
+++ b/web-console/src/dialogs/overlord-dynamic-config-dialog/overlord-dynamic-config-dialog.tsx
@@ -52,7 +52,7 @@ export const OverlordDynamicConfigDialog = React.memo(function OverlordDynamicCo
     processQuery: async () => {
       try {
         const configResp = await axios(`/druid/indexer/v1/worker`);
-        setDynamicConfig(configResp.data);
+        setDynamicConfig(configResp.data || {});
       } catch (e) {
         AppToaster.show({
           icon: IconNames.ERROR,
diff --git a/web-console/src/dialogs/retention-dialog/__snapshots__/retention-dialog.spec.tsx.snap b/web-console/src/dialogs/retention-dialog/__snapshots__/retention-dialog.spec.tsx.snap
index 8c85f7f..380ea44 100644
--- a/web-console/src/dialogs/retention-dialog/__snapshots__/retention-dialog.spec.tsx.snap
+++ b/web-console/src/dialogs/retention-dialog/__snapshots__/retention-dialog.spec.tsx.snap
@@ -58,7 +58,7 @@ exports[`retention dialog matches snapshot 1`] = `
             Druid uses rules to determine what data should be retained in the cluster. The rules are evaluated in order from top to bottom. For more information please refer to the
              
             <a
-              href="https://druid.apache.org/docs/0.19.0/operations/rule-configuration.html"
+              href="https://druid.apache.org/docs/0.20.0/operations/rule-configuration.html"
               rel="noopener noreferrer"
               target="_blank"
             >
diff --git a/web-console/src/dialogs/spec-dialog/spec-dialog.scss b/web-console/src/dialogs/spec-dialog/spec-dialog.scss
index 17d4cff..b8c7198 100644
--- a/web-console/src/dialogs/spec-dialog/spec-dialog.scss
+++ b/web-console/src/dialogs/spec-dialog/spec-dialog.scss
@@ -25,9 +25,5 @@
   .spec-dialog-textarea {
     background-color: #232c35;
     margin-bottom: 10px;
-
-    .ace-solarized-dark {
-      background-color: #232c35;
-    }
   }
 }
diff --git a/web-console/src/dialogs/supervisor-table-action-dialog/supervisor-table-action-dialog.tsx b/web-console/src/dialogs/supervisor-table-action-dialog/supervisor-table-action-dialog.tsx
index 41897f7..e1c6b07 100644
--- a/web-console/src/dialogs/supervisor-table-action-dialog/supervisor-table-action-dialog.tsx
+++ b/web-console/src/dialogs/supervisor-table-action-dialog/supervisor-table-action-dialog.tsx
@@ -21,8 +21,8 @@ import React, { useState } from 'react';
 import { ShowJson } from '../../components';
 import { ShowHistory } from '../../components/show-history/show-history';
 import { SupervisorStatisticsTable } from '../../components/supervisor-statistics-table/supervisor-statistics-table';
+import { deepGet } from '../../utils';
 import { BasicAction } from '../../utils/basic-action';
-import { deepGet } from '../../utils/object-change';
 import { SideButtonMetaData, TableActionDialog } from '../table-action-dialog/table-action-dialog';
 
 interface SupervisorTableActionDialogProps {
diff --git a/web-console/src/dialogs/task-table-action-dialog/task-table-action-dialog.tsx b/web-console/src/dialogs/task-table-action-dialog/task-table-action-dialog.tsx
index b5f9440..d030821 100644
--- a/web-console/src/dialogs/task-table-action-dialog/task-table-action-dialog.tsx
+++ b/web-console/src/dialogs/task-table-action-dialog/task-table-action-dialog.tsx
@@ -19,8 +19,8 @@
 import React, { useState } from 'react';
 
 import { ShowJson, ShowLog } from '../../components';
+import { deepGet } from '../../utils';
 import { BasicAction } from '../../utils/basic-action';
-import { deepGet } from '../../utils/object-change';
 import { SideButtonMetaData, TableActionDialog } from '../table-action-dialog/table-action-dialog';
 
 interface TaskTableActionDialogProps {
diff --git a/web-console/src/utils/__snapshots__/ingestion-spec.spec.ts.snap b/web-console/src/druid-models/__snapshots__/ingestion-spec.spec.ts.snap
similarity index 100%
rename from web-console/src/utils/__snapshots__/ingestion-spec.spec.ts.snap
rename to web-console/src/druid-models/__snapshots__/ingestion-spec.spec.ts.snap
diff --git a/web-console/src/dialogs/compaction-dialog/compaction-dialog.tsx b/web-console/src/druid-models/compaction-config.tsx
similarity index 69%
copy from web-console/src/dialogs/compaction-dialog/compaction-dialog.tsx
copy to web-console/src/druid-models/compaction-config.tsx
index d2b8055..02bb42f 100644
--- a/web-console/src/dialogs/compaction-dialog/compaction-dialog.tsx
+++ b/web-console/src/druid-models/compaction-config.tsx
@@ -16,21 +16,15 @@
  * limitations under the License.
  */
 
-import { Button, Classes, Code, Dialog, Intent } from '@blueprintjs/core';
-import React, { useState } from 'react';
+import { Code } from '@blueprintjs/core';
+import React from 'react';
 
-import { AutoForm, Field, JsonInput } from '../../components';
-import {
-  FormJsonSelector,
-  FormJsonTabs,
-} from '../../components/form-json-selector/form-json-selector';
-import { deepGet, deepSet } from '../../utils/object-change';
+import { Field } from '../components';
+import { deepGet, deepSet, oneOf } from '../utils';
 
-import './compaction-dialog.scss';
+export type CompactionConfig = Record<string, any>;
 
-type CompactionConfig = Record<string, any>;
-
-const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
+export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
   {
     name: 'skipOffsetFromLatest',
     type: 'string',
@@ -78,6 +72,7 @@ const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
     name: 'tuningConfig.partitionsSpec.targetRowsPerSegment',
     label: 'Target rows per segment',
     type: 'number',
+    zeroMeansUndefined: true,
     defined: (t: CompactionConfig) =>
       deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
       !deepGet(t, 'tuningConfig.partitionsSpec.numShards'),
@@ -98,6 +93,7 @@ const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
     name: 'tuningConfig.partitionsSpec.numShards',
     label: 'Num shards',
     type: 'number',
+    zeroMeansUndefined: true,
     defined: (t: CompactionConfig) =>
       deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
       !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
@@ -119,6 +115,7 @@ const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
     name: 'tuningConfig.partitionsSpec.partitionDimensions',
     label: 'Partition dimensions',
     type: 'string-array',
+    placeholder: '(all dimensions)',
     defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed',
     info: <p>The dimensions to partition on. Leave blank to select all dimensions.</p>,
   },
@@ -208,13 +205,13 @@ const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
     ),
   },
   {
-    name: 'tuningConfig.maxNumMergeTasks',
-    label: 'Max num merge tasks',
+    name: 'tuningConfig.totalNumMergeTasks',
+    label: 'Total num merge tasks',
     type: 'number',
-    defaultValue: 1,
+    defaultValue: 10,
     min: 1,
     defined: (t: CompactionConfig) =>
-      ['hashed', 'single_dim'].includes(deepGet(t, 'tuningConfig.partitionsSpec.type')),
+      oneOf(deepGet(t, 'tuningConfig.partitionsSpec.type'), 'hashed', 'single_dim'),
     info: <>Maximum number of merge tasks which can be run at the same time.</>,
   },
   {
@@ -233,93 +230,3 @@ const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
     ),
   },
 ];
-
-function validCompactionConfig(compactionConfig: CompactionConfig): boolean {
-  const partitionsSpecType =
-    deepGet(compactionConfig, 'tuningConfig.partitionsSpec.type') || 'dynamic';
-  switch (partitionsSpecType) {
-    // case 'dynamic': // Nothing to check for dynamic
-    case 'hashed':
-      return !(
-        Boolean(deepGet(compactionConfig, 'tuningConfig.partitionsSpec.targetRowsPerSegment')) &&
-        Boolean(deepGet(compactionConfig, 'tuningConfig.partitionsSpec.numShards'))
-      );
-      break;
-    case 'single_dim':
-      if (!deepGet(compactionConfig, 'tuningConfig.partitionsSpec.partitionDimension')) {
-        return false;
-      }
-      const hasTargetRowsPerSegment = Boolean(
-        deepGet(compactionConfig, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
-      );
-      const hasMaxRowsPerSegment = Boolean(
-        deepGet(compactionConfig, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-      );
-      if (hasTargetRowsPerSegment === hasMaxRowsPerSegment) {
-        return false;
-      }
-      break;
-  }
-
-  return true;
-}
-
-export interface CompactionDialogProps {
-  onClose: () => void;
-  onSave: (compactionConfig: CompactionConfig) => void;
-  onDelete: () => void;
-  datasource: string;
-  compactionConfig: CompactionConfig | undefined;
-}
-
-export const CompactionDialog = React.memo(function CompactionDialog(props: CompactionDialogProps) {
-  const { datasource, compactionConfig, onSave, onClose, onDelete } = props;
-
-  const [currentTab, setCurrentTab] = useState<FormJsonTabs>('form');
-  const [currentConfig, setCurrentConfig] = useState<CompactionConfig>(
-    compactionConfig || {
-      dataSource: datasource,
-      tuningConfig: { partitionsSpec: { type: 'dynamic' } },
-    },
-  );
-
-  function handleSubmit() {
-    if (!validCompactionConfig(currentConfig)) return;
-    onSave(currentConfig);
-  }
-
-  return (
-    <Dialog
-      className="compaction-dialog"
-      isOpen
-      onClose={onClose}
-      canOutsideClickClose={false}
-      title={`Compaction config: ${datasource}`}
-    >
-      <FormJsonSelector tab={currentTab} onChange={setCurrentTab} />
-      <div className="content">
-        {currentTab === 'form' ? (
-          <AutoForm
-            fields={COMPACTION_CONFIG_FIELDS}
-            model={currentConfig}
-            onChange={m => setCurrentConfig(m)}
-          />
-        ) : (
-          <JsonInput value={currentConfig} onChange={setCurrentConfig} height="100%" />
-        )}
-      </div>
-      <div className={Classes.DIALOG_FOOTER}>
-        <div className={Classes.DIALOG_FOOTER_ACTIONS}>
-          {compactionConfig && <Button text="Delete" intent={Intent.DANGER} onClick={onDelete} />}
-          <Button text="Close" onClick={onClose} />
-          <Button
-            text="Submit"
-            intent={Intent.PRIMARY}
-            onClick={handleSubmit}
-            disabled={!validCompactionConfig(currentConfig)}
-          />
-        </div>
-      </div>
-    </Dialog>
-  );
-});
diff --git a/web-console/src/utils/compaction.spec.ts b/web-console/src/druid-models/compaction-status.spec.ts
similarity index 95%
rename from web-console/src/utils/compaction.spec.ts
rename to web-console/src/druid-models/compaction-status.spec.ts
index dfb1420..68b4cca 100644
--- a/web-console/src/utils/compaction.spec.ts
+++ b/web-console/src/druid-models/compaction-status.spec.ts
@@ -16,14 +16,14 @@
  * limitations under the License.
  */
 
+import { CompactionConfig } from './compaction-config';
 import {
-  CompactionConfig,
   CompactionStatus,
   formatCompactionConfigAndStatus,
   zeroCompactionStatus,
-} from './compaction';
+} from './compaction-status';
 
-describe('compaction', () => {
+describe('compaction status', () => {
   const BASIC_CONFIG: CompactionConfig = {};
   const ZERO_STATUS: CompactionStatus = {
     dataSource: 'tbl',
diff --git a/web-console/src/utils/compaction.ts b/web-console/src/druid-models/compaction-status.ts
similarity index 97%
rename from web-console/src/utils/compaction.ts
rename to web-console/src/druid-models/compaction-status.ts
index 287b8ec..c637e44 100644
--- a/web-console/src/utils/compaction.ts
+++ b/web-console/src/druid-models/compaction-status.ts
@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 
+import { CompactionConfig } from './compaction-config';
+
 function capitalizeFirst(str: string): string {
   return str.slice(0, 1).toUpperCase() + str.slice(1).toLowerCase();
 }
@@ -34,8 +36,6 @@ export interface CompactionStatus {
   intervalCountSkipped: number;
 }
 
-export type CompactionConfig = Record<string, any>;
-
 export function zeroCompactionStatus(compactionStatus: CompactionStatus): boolean {
   return (
     !compactionStatus.bytesAwaitingCompaction &&
diff --git a/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss b/web-console/src/druid-models/dimension-spec.spec.ts
similarity index 74%
copy from web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
copy to web-console/src/druid-models/dimension-spec.spec.ts
index f5cd57f..c018c9d 100644
--- a/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
+++ b/web-console/src/druid-models/dimension-spec.spec.ts
@@ -16,23 +16,14 @@
  * limitations under the License.
  */
 
-.compaction-dialog {
-  &.bp3-dialog {
-    height: 80vh;
-  }
+import { getDimensionSpecs } from './dimension-spec';
 
-  .form-json-selector {
-    margin: 15px;
-  }
-
-  .content {
-    margin: 0 15px 10px 0;
-    padding: 0 5px 0 15px;
-    flex: 1;
-    overflow: auto;
-  }
-
-  .ace-solarized-dark {
-    background-color: #232c35;
-  }
-}
+describe('dimension-spec', () => {
+  it('getDimensionSpecs', () => {
+    expect(getDimensionSpecs({ header: ['header'], rows: [] }, {}, true)).toMatchInlineSnapshot(`
+      Array [
+        "header",
+      ]
+    `);
+  });
+});
diff --git a/web-console/src/druid-models/dimension-spec.ts b/web-console/src/druid-models/dimension-spec.ts
new file mode 100644
index 0000000..e305d67
--- /dev/null
+++ b/web-console/src/druid-models/dimension-spec.ts
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { Field } from '../components';
+import { filterMap } from '../utils';
+import { HeaderAndRows } from '../utils/sampler';
+
+import { getColumnTypeFromHeaderAndRows } from './ingestion-spec';
+
+export interface DimensionsSpec {
+  dimensions?: (string | DimensionSpec)[];
+  dimensionExclusions?: string[];
+  spatialDimensions?: any[];
+}
+
+export interface DimensionSpec {
+  type: string;
+  name: string;
+  createBitmapIndex?: boolean;
+}
+
+export const DIMENSION_SPEC_FIELDS: Field<DimensionSpec>[] = [
+  {
+    name: 'name',
+    type: 'string',
+  },
+  {
+    name: 'type',
+    type: 'string',
+    suggestions: ['string', 'long', 'float', 'double'],
+  },
+  {
+    name: 'createBitmapIndex',
+    type: 'boolean',
+    defaultValue: true,
+    defined: (dimensionSpec: DimensionSpec) => dimensionSpec.type === 'string',
+  },
+];
+
+export function getDimensionSpecName(dimensionSpec: string | DimensionSpec): string {
+  return typeof dimensionSpec === 'string' ? dimensionSpec : dimensionSpec.name;
+}
+
+export function getDimensionSpecType(dimensionSpec: string | DimensionSpec): string {
+  return typeof dimensionSpec === 'string' ? 'string' : dimensionSpec.type;
+}
+
+export function inflateDimensionSpec(dimensionSpec: string | DimensionSpec): DimensionSpec {
+  return typeof dimensionSpec === 'string'
+    ? { name: dimensionSpec, type: 'string' }
+    : dimensionSpec;
+}
+
+export function getDimensionSpecs(
+  headerAndRows: HeaderAndRows,
+  typeHints: Record<string, string>,
+  hasRollup: boolean,
+): (string | DimensionSpec)[] {
+  return filterMap(headerAndRows.header, h => {
+    if (h === '__time') return;
+    const type = typeHints[h] || getColumnTypeFromHeaderAndRows(headerAndRows, h);
+    if (type === 'string') return h;
+    if (hasRollup) return;
+    return {
+      type,
+      name: h,
+    };
+  });
+}
diff --git a/web-console/src/druid-models/filter.tsx b/web-console/src/druid-models/filter.tsx
new file mode 100644
index 0000000..1079129
--- /dev/null
+++ b/web-console/src/druid-models/filter.tsx
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { Field } from '../components';
+import { deepGet, EMPTY_ARRAY, oneOf } from '../utils';
+
+export type DruidFilter = Record<string, any>;
+
+export interface DimensionFiltersWithRest {
+  dimensionFilters: DruidFilter[];
+  restFilter?: DruidFilter;
+}
+
+export function splitFilter(filter: DruidFilter | null): DimensionFiltersWithRest {
+  const inputAndFilters: DruidFilter[] = filter
+    ? filter.type === 'and' && Array.isArray(filter.fields)
+      ? filter.fields
+      : [filter]
+    : EMPTY_ARRAY;
+  const dimensionFilters: DruidFilter[] = inputAndFilters.filter(
+    f => typeof f.dimension === 'string',
+  );
+  const restFilters: DruidFilter[] = inputAndFilters.filter(f => typeof f.dimension !== 'string');
+
+  return {
+    dimensionFilters,
+    restFilter: restFilters.length
+      ? restFilters.length > 1
+        ? { type: 'and', filters: restFilters }
+        : restFilters[0]
+      : undefined,
+  };
+}
+
+export function joinFilter(
+  dimensionFiltersWithRest: DimensionFiltersWithRest,
+): DruidFilter | undefined {
+  const { dimensionFilters, restFilter } = dimensionFiltersWithRest;
+  let newFields = dimensionFilters || EMPTY_ARRAY;
+  if (restFilter && restFilter.type) newFields = newFields.concat([restFilter]);
+
+  if (!newFields.length) return;
+  if (newFields.length === 1) return newFields[0];
+  return { type: 'and', fields: newFields };
+}
+
+export const FILTER_FIELDS: Field<DruidFilter>[] = [
+  {
+    name: 'type',
+    type: 'string',
+    suggestions: ['selector', 'in', 'regex', 'like', 'not'],
+  },
+  {
+    name: 'dimension',
+    type: 'string',
+    defined: (df: DruidFilter) => oneOf(df.type, 'selector', 'in', 'regex', 'like'),
+  },
+  {
+    name: 'value',
+    type: 'string',
+    defined: (df: DruidFilter) => df.type === 'selector',
+  },
+  {
+    name: 'values',
+    type: 'string-array',
+    defined: (df: DruidFilter) => df.type === 'in',
+  },
+  {
+    name: 'pattern',
+    type: 'string',
+    defined: (df: DruidFilter) => oneOf(df.type, 'regex', 'like'),
+  },
+
+  {
+    name: 'field.type',
+    label: 'Sub-filter type',
+    type: 'string',
+    suggestions: ['selector', 'in', 'regex', 'like'],
+    defined: (df: DruidFilter) => df.type === 'not',
+  },
+  {
+    name: 'field.dimension',
+    label: 'Sub-filter dimension',
+    type: 'string',
+    defined: (df: DruidFilter) => df.type === 'not',
+  },
+  {
+    name: 'field.value',
+    label: 'Sub-filter value',
+    type: 'string',
+    defined: (df: DruidFilter) => df.type === 'not' && deepGet(df, 'field.type') === 'selector',
+  },
+  {
+    name: 'field.values',
+    label: 'Sub-filter values',
+    type: 'string-array',
+    defined: (df: DruidFilter) => df.type === 'not' && deepGet(df, 'field.type') === 'in',
+  },
+  {
+    name: 'field.pattern',
+    label: 'Sub-filter pattern',
+    type: 'string',
+    defined: (df: DruidFilter) =>
+      df.type === 'not' && oneOf(deepGet(df, 'field.type'), 'regex', 'like'),
+  },
+];
diff --git a/web-console/src/utils/spec-utils.spec.ts b/web-console/src/druid-models/flatten-spec.spec.ts
similarity index 97%
rename from web-console/src/utils/spec-utils.spec.ts
rename to web-console/src/druid-models/flatten-spec.spec.ts
index f368417..faf8987 100644
--- a/web-console/src/utils/spec-utils.spec.ts
+++ b/web-console/src/druid-models/flatten-spec.spec.ts
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-import { computeFlattenExprsForData } from './spec-utils';
+import { computeFlattenExprsForData } from './flatten-spec';
 
 describe('spec-utils', () => {
   describe('computeFlattenExprsForData', () => {
diff --git a/web-console/src/utils/spec-utils.ts b/web-console/src/druid-models/flatten-spec.tsx
similarity index 73%
rename from web-console/src/utils/spec-utils.ts
rename to web-console/src/druid-models/flatten-spec.tsx
index 2842118..f4246f1 100644
--- a/web-console/src/utils/spec-utils.ts
+++ b/web-console/src/druid-models/flatten-spec.tsx
@@ -16,7 +16,50 @@
  * limitations under the License.
  */
 
-import { FlattenField } from './ingestion-spec';
+import React from 'react';
+
+import { ExternalLink, Field } from '../components';
+import { getLink } from '../links';
+import { oneOf } from '../utils';
+
+export interface FlattenSpec {
+  useFieldDiscovery?: boolean;
+  fields?: FlattenField[];
+}
+
+export interface FlattenField {
+  name: string;
+  type: string;
+  expr: string;
+}
+
+export const FLATTEN_FIELD_FIELDS: Field<FlattenField>[] = [
+  {
+    name: 'name',
+    type: 'string',
+    placeholder: 'column_name',
+    required: true,
+  },
+  {
+    name: 'type',
+    type: 'string',
+    suggestions: ['path', 'jq', 'root'],
+    required: true,
+  },
+  {
+    name: 'expr',
+    type: 'string',
+    placeholder: '$.thing',
+    defined: (flattenField: FlattenField) => oneOf(flattenField.type, 'path', 'jq'),
+    required: true,
+    info: (
+      <>
+        Specify a flatten{' '}
+        <ExternalLink href={`${getLink('DOCS')}/ingestion/flatten-json`}>expression</ExternalLink>.
+      </>
+    ),
+  },
+];
 
 export type ExprType = 'path' | 'jq';
 export type ArrayHandling = 'ignore-arrays' | 'include-arrays';
diff --git a/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss b/web-console/src/druid-models/index.ts
similarity index 65%
copy from web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
copy to web-console/src/druid-models/index.ts
index f5cd57f..5e7debe 100644
--- a/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
+++ b/web-console/src/druid-models/index.ts
@@ -16,23 +16,16 @@
  * limitations under the License.
  */
 
-.compaction-dialog {
-  &.bp3-dialog {
-    height: 80vh;
-  }
-
-  .form-json-selector {
-    margin: 15px;
-  }
-
-  .content {
-    margin: 0 15px 10px 0;
-    padding: 0 5px 0 15px;
-    flex: 1;
-    overflow: auto;
-  }
-
-  .ace-solarized-dark {
-    background-color: #232c35;
-  }
-}
+export * from './compaction-config';
+export * from './compaction-status';
+export * from './lookup-spec';
+export * from './time';
+export * from './timestamp-spec';
+export * from './transform-spec';
+export * from './input-source';
+export * from './input-format';
+export * from './flatten-spec';
+export * from './filter';
+export * from './dimension-spec';
+export * from './metric-spec';
+export * from './ingestion-spec';
diff --git a/web-console/src/utils/ingestion-spec.spec.ts b/web-console/src/druid-models/ingestion-spec.spec.ts
similarity index 60%
rename from web-console/src/utils/ingestion-spec.spec.ts
rename to web-console/src/druid-models/ingestion-spec.spec.ts
index 3577819..adccec6 100644
--- a/web-console/src/utils/ingestion-spec.spec.ts
+++ b/web-console/src/druid-models/ingestion-spec.spec.ts
@@ -16,7 +16,16 @@
  * limitations under the License.
  */
 
-import { cleanSpec, downgradeSpec, guessInputFormat, upgradeSpec } from './ingestion-spec';
+import {
+  cleanSpec,
+  downgradeSpec,
+  getColumnTypeFromHeaderAndRows,
+  guessInputFormat,
+  guessTypeFromSample,
+  IngestionSpec,
+  updateSchemaWithSample,
+  upgradeSpec,
+} from './ingestion-spec';
 
 describe('ingestion-spec', () => {
   const oldSpec = {
@@ -152,3 +161,98 @@ describe('ingestion-spec', () => {
     });
   });
 });
+
+describe('spec utils', () => {
+  const ingestionSpec: IngestionSpec = {
+    type: 'index_parallel',
+    spec: {
+      ioConfig: {
+        type: 'index_parallel',
+        inputSource: {
+          type: 'http',
+          uris: ['https://static.imply.io/data/wikipedia.json.gz'],
+        },
+        inputFormat: {
+          type: 'json',
+        },
+      },
+      tuningConfig: {
+        type: 'index_parallel',
+      },
+      dataSchema: {
+        dataSource: 'wikipedia',
+        granularitySpec: {
+          type: 'uniform',
+          segmentGranularity: 'DAY',
+          queryGranularity: 'HOUR',
+        },
+        timestampSpec: {
+          column: 'timestamp',
+          format: 'iso',
+        },
+        dimensionsSpec: {},
+      },
+    },
+  };
+
+  it('guessTypeFromSample', () => {
+    expect(guessTypeFromSample([])).toMatchInlineSnapshot(`"string"`);
+  });
+
+  it('getColumnTypeFromHeaderAndRows', () => {
+    expect(
+      getColumnTypeFromHeaderAndRows({ header: ['header'], rows: [] }, 'header'),
+    ).toMatchInlineSnapshot(`"string"`);
+  });
+
+  it('updateSchemaWithSample', () => {
+    expect(
+      updateSchemaWithSample(ingestionSpec, { header: ['header'], rows: [] }, 'specific', true),
+    ).toMatchInlineSnapshot(`
+      Object {
+        "spec": Object {
+          "dataSchema": Object {
+            "dataSource": "wikipedia",
+            "dimensionsSpec": Object {
+              "dimensions": Array [
+                "header",
+              ],
+            },
+            "granularitySpec": Object {
+              "queryGranularity": "HOUR",
+              "rollup": true,
+              "segmentGranularity": "DAY",
+              "type": "uniform",
+            },
+            "metricsSpec": Array [
+              Object {
+                "name": "count",
+                "type": "count",
+              },
+            ],
+            "timestampSpec": Object {
+              "column": "timestamp",
+              "format": "iso",
+            },
+          },
+          "ioConfig": Object {
+            "inputFormat": Object {
+              "type": "json",
+            },
+            "inputSource": Object {
+              "type": "http",
+              "uris": Array [
+                "https://static.imply.io/data/wikipedia.json.gz",
+              ],
+            },
+            "type": "index_parallel",
+          },
+          "tuningConfig": Object {
+            "type": "index_parallel",
+          },
+        },
+        "type": "index_parallel",
+      }
+    `);
+  });
+});
diff --git a/web-console/src/utils/ingestion-spec.tsx b/web-console/src/druid-models/ingestion-spec.tsx
similarity index 72%
rename from web-console/src/utils/ingestion-spec.tsx
rename to web-console/src/druid-models/ingestion-spec.tsx
index 73f6d43..e58c591 100644
--- a/web-console/src/utils/ingestion-spec.tsx
+++ b/web-console/src/druid-models/ingestion-spec.tsx
@@ -19,24 +19,39 @@
 import { Code } from '@blueprintjs/core';
 import React from 'react';
 
-import { Field } from '../components/auto-form/auto-form';
-import { ExternalLink } from '../components/external-link/external-link';
+import { ExternalLink, Field } from '../components';
 import { getLink } from '../links';
+import {
+  deepDelete,
+  deepGet,
+  deepMove,
+  deepSet,
+  EMPTY_ARRAY,
+  EMPTY_OBJECT,
+  filterMap,
+  oneOf,
+} from '../utils';
+import { HeaderAndRows } from '../utils/sampler';
 
 import {
-  BASIC_TIME_FORMATS,
-  DATE_ONLY_TIME_FORMATS,
-  DATETIME_TIME_FORMATS,
-  OTHER_TIME_FORMATS,
-} from './druid-time';
-import { deepDelete, deepGet, deepMove, deepSet } from './object-change';
+  DimensionsSpec,
+  getDimensionSpecName,
+  getDimensionSpecs,
+  getDimensionSpecType,
+} from './dimension-spec';
+import { InputFormat, issueWithInputFormat } from './input-format';
+import { InputSource, issueWithInputSource } from './input-source';
+import {
+  getMetricSpecOutputType,
+  getMetricSpecs,
+  getMetricSpecSingleFieldName,
+  MetricSpec,
+} from './metric-spec';
+import { PLACEHOLDER_TIMESTAMP_SPEC, TimestampSpec } from './timestamp-spec';
+import { TransformSpec } from './transform-spec';
 
 export const MAX_INLINE_DATA_LENGTH = 65536;
 
-// These constants are used to make sure that they are not constantly recreated thrashing the pure components
-export const EMPTY_OBJECT: any = {};
-export const EMPTY_ARRAY: any[] = [];
-
 const CURRENT_YEAR = new Date().getUTCFullYear();
 
 export interface IngestionSpec {
@@ -77,14 +92,6 @@ export type IngestionComboTypeWithExtra =
   | 'example'
   | 'other';
 
-export function adjustIngestionSpec(spec: IngestionSpec) {
-  const tuningConfig = deepGet(spec, 'spec.tuningConfig');
-  if (tuningConfig) {
-    spec = deepSet(spec, 'spec.tuningConfig', adjustTuningConfig(tuningConfig));
-  }
-  return spec;
-}
-
 function ingestionTypeToIoAndTuningConfigType(ingestionType: IngestionType): string {
   switch (ingestionType) {
     case 'kafka':
@@ -189,7 +196,7 @@ export function getIngestionDocLink(spec: IngestionSpec): string {
       return `${getLink('DOCS')}/development/extensions-core/kinesis-ingestion.html`;
 
     default:
-      return `${getLink('DOCS')}/ingestion/native-batch.html#firehoses`;
+      return `${getLink('DOCS')}/ingestion/native-batch.html#input-sources`;
   }
 }
 
@@ -229,18 +236,6 @@ export interface DataSchema {
   metricsSpec?: MetricSpec[];
 }
 
-export interface InputFormat {
-  type: string;
-  findColumnsFromHeader?: boolean;
-  skipHeaderRows?: number;
-  columns?: string[];
-  listDelimiter?: string;
-  pattern?: string;
-  function?: string;
-  flattenSpec?: FlattenSpec;
-  keepNullColumns?: boolean;
-}
-
 export type DimensionMode = 'specific' | 'auto-detect';
 
 export function getDimensionMode(spec: IngestionSpec): DimensionMode {
@@ -266,7 +261,7 @@ export function isTask(spec: IngestionSpec) {
   const type = String(getSpecType(spec));
   return (
     type.startsWith('index_') ||
-    ['index', 'compact', 'kill', 'append', 'merge', 'same_interval_merge'].includes(type)
+    oneOf(type, 'index', 'compact', 'kill', 'append', 'merge', 'same_interval_merge')
   );
 }
 
@@ -314,359 +309,6 @@ export function cleanSpec(spec: IngestionSpec): IngestionSpec {
   };
 }
 
-const INPUT_FORMAT_FORM_FIELDS: Field<InputFormat>[] = [
-  {
-    name: 'type',
-    label: 'Input format',
-    type: 'string',
-    suggestions: ['json', 'csv', 'tsv', 'regex', 'parquet', 'orc', 'avro_ocf'],
-    info: (
-      <>
-        <p>The parser used to parse the data.</p>
-        <p>
-          For more information see{' '}
-          <ExternalLink href={`${getLink('DOCS')}/ingestion/data-formats.html`}>
-            the documentation
-          </ExternalLink>
-          .
-        </p>
-      </>
-    ),
-  },
-  {
-    name: 'pattern',
-    type: 'string',
-    required: true,
-    defined: (p: InputFormat) => p.type === 'regex',
-  },
-  {
-    name: 'function',
-    type: 'string',
-    required: true,
-    defined: (p: InputFormat) => p.type === 'javascript',
-  },
-  {
-    name: 'findColumnsFromHeader',
-    type: 'boolean',
-    required: true,
-    defined: (p: InputFormat) => p.type === 'csv' || p.type === 'tsv',
-  },
-  {
-    name: 'skipHeaderRows',
-    type: 'number',
-    defaultValue: 0,
-    defined: (p: InputFormat) => p.type === 'csv' || p.type === 'tsv',
-    min: 0,
-    info: (
-      <>
-        If both skipHeaderRows and hasHeaderRow options are set, skipHeaderRows is first applied.
-        For example, if you set skipHeaderRows to 2 and hasHeaderRow to true, Druid will skip the
-        first two lines and then extract column information from the third line.
-      </>
-    ),
-  },
-  {
-    name: 'columns',
-    type: 'string-array',
-    required: (p: InputFormat) =>
-      ((p.type === 'csv' || p.type === 'tsv') && !p.findColumnsFromHeader) || p.type === 'regex',
-    defined: (p: InputFormat) =>
-      ((p.type === 'csv' || p.type === 'tsv') && !p.findColumnsFromHeader) || p.type === 'regex',
-  },
-  {
-    name: 'delimiter',
-    type: 'string',
-    defaultValue: '\t',
-    defined: (p: InputFormat) => p.type === 'tsv',
-    info: <>A custom delimiter for data values.</>,
-  },
-  {
-    name: 'listDelimiter',
-    type: 'string',
-    defined: (p: InputFormat) => p.type === 'csv' || p.type === 'tsv' || p.type === 'regex',
-    info: <>A custom delimiter for multi-value dimensions.</>,
-  },
-  {
-    name: 'binaryAsString',
-    type: 'boolean',
-    defaultValue: false,
-    defined: (p: InputFormat) => p.type === 'parquet' || p.type === 'orc' || p.type === 'avro_ocf',
-    info: (
-      <>
-        Specifies if the bytes parquet column which is not logically marked as a string or enum type
-        should be treated as a UTF-8 encoded string.
-      </>
-    ),
-  },
-];
-
-export function getInputFormatFormFields() {
-  return INPUT_FORMAT_FORM_FIELDS;
-}
-
-export function issueWithInputFormat(inputFormat: InputFormat | undefined): string | undefined {
-  if (!inputFormat) return 'no input format';
-  if (!inputFormat.type) return 'missing a type';
-  switch (inputFormat.type) {
-    case 'regex':
-      if (!inputFormat.pattern) return "must have a 'pattern'";
-      break;
-
-    case 'javascript':
-      if (!inputFormat['function']) return "must have a 'function'";
-      break;
-  }
-  return;
-}
-
-export function inputFormatCanFlatten(inputFormat: InputFormat): boolean {
-  const inputFormatType = inputFormat.type;
-  return (
-    inputFormatType === 'json' ||
-    inputFormatType === 'parquet' ||
-    inputFormatType === 'orc' ||
-    inputFormatType === 'avro_ocf'
-  );
-}
-
-export interface TimestampSpec {
-  column?: string;
-  format?: string;
-  missingValue?: string;
-}
-
-export function getTimestampSpecColumn(timestampSpec: TimestampSpec) {
-  // https://github.com/apache/druid/blob/master/core/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java#L44
-  return timestampSpec.column || 'timestamp';
-}
-
-const NO_SUCH_COLUMN = '!!!_no_such_column_!!!';
-
-const DUMMY_TIMESTAMP_SPEC: TimestampSpec = {
-  column: NO_SUCH_COLUMN,
-  missingValue: '1970-01-01T00:00:00Z',
-};
-
-export function getDummyTimestampSpec() {
-  return DUMMY_TIMESTAMP_SPEC;
-}
-
-const CONSTANT_TIMESTAMP_SPEC: TimestampSpec = {
-  column: NO_SUCH_COLUMN,
-  missingValue: '2010-01-01T00:00:00Z',
-};
-
-export function getConstantTimestampSpec() {
-  return CONSTANT_TIMESTAMP_SPEC;
-}
-
-export function isColumnTimestampSpec(timestampSpec: TimestampSpec) {
-  return (deepGet(timestampSpec, 'column') || 'timestamp') !== NO_SUCH_COLUMN;
-}
-
-const TIMESTAMP_SPEC_FORM_FIELDS: Field<TimestampSpec>[] = [
-  {
-    name: 'column',
-    type: 'string',
-    defaultValue: 'timestamp',
-  },
-  {
-    name: 'format',
-    type: 'string',
-    defaultValue: 'auto',
-    suggestions: [
-      ...BASIC_TIME_FORMATS,
-      {
-        group: 'Date and time formats',
-        suggestions: DATETIME_TIME_FORMATS,
-      },
-      {
-        group: 'Date only formats',
-        suggestions: DATE_ONLY_TIME_FORMATS,
-      },
-      {
-        group: 'Other time formats',
-        suggestions: OTHER_TIME_FORMATS,
-      },
-    ],
-    defined: (timestampSpec: TimestampSpec) => isColumnTimestampSpec(timestampSpec),
-    info: (
-      <p>
-        Please specify your timestamp format by using the suggestions menu or typing in a{' '}
-        <ExternalLink href="https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html">
-          format string
-        </ExternalLink>
-        .
-      </p>
-    ),
-  },
-  {
-    name: 'missingValue',
-    type: 'string',
-    placeholder: '(optional)',
-    info: <p>This value will be used if the specified column can not be found.</p>,
-  },
-];
-
-const CONSTANT_TIMESTAMP_SPEC_FORM_FIELDS: Field<TimestampSpec>[] = [
-  {
-    name: 'missingValue',
-    label: 'Constant value',
-    type: 'string',
-    info: <p>The dummy value that will be used as the timestamp.</p>,
-  },
-];
-
-export function getTimestampSpecFormFields(timestampSpec: TimestampSpec) {
-  if (isColumnTimestampSpec(timestampSpec)) {
-    return TIMESTAMP_SPEC_FORM_FIELDS;
-  } else {
-    return CONSTANT_TIMESTAMP_SPEC_FORM_FIELDS;
-  }
-}
-
-export function issueWithTimestampSpec(
-  timestampSpec: TimestampSpec | undefined,
-): string | undefined {
-  if (!timestampSpec) return 'no spec';
-  if (!timestampSpec.column && !timestampSpec.missingValue) return 'timestamp spec is blank';
-  return;
-}
-
-export interface DimensionsSpec {
-  dimensions?: (string | DimensionSpec)[];
-  dimensionExclusions?: string[];
-  spatialDimensions?: any[];
-}
-
-export interface DimensionSpec {
-  type: string;
-  name: string;
-  createBitmapIndex?: boolean;
-}
-
-const DIMENSION_SPEC_FORM_FIELDS: Field<DimensionSpec>[] = [
-  {
-    name: 'name',
-    type: 'string',
-  },
-  {
-    name: 'type',
-    type: 'string',
-    suggestions: ['string', 'long', 'float', 'double'],
-  },
-  {
-    name: 'createBitmapIndex',
-    type: 'boolean',
-    defaultValue: true,
-    defined: (dimensionSpec: DimensionSpec) => dimensionSpec.type === 'string',
-  },
-];
-
-export function getDimensionSpecFormFields() {
-  return DIMENSION_SPEC_FORM_FIELDS;
-}
-
-export function getDimensionSpecName(dimensionSpec: string | DimensionSpec): string {
-  return typeof dimensionSpec === 'string' ? dimensionSpec : dimensionSpec.name;
-}
-
-export function getDimensionSpecType(dimensionSpec: string | DimensionSpec): string {
-  return typeof dimensionSpec === 'string' ? 'string' : dimensionSpec.type;
-}
-
-export function inflateDimensionSpec(dimensionSpec: string | DimensionSpec): DimensionSpec {
-  return typeof dimensionSpec === 'string'
-    ? { name: dimensionSpec, type: 'string' }
-    : dimensionSpec;
-}
-
-export interface FlattenSpec {
-  useFieldDiscovery?: boolean;
-  fields?: FlattenField[];
-}
-
-export interface FlattenField {
-  name: string;
-  type: string;
-  expr: string;
-}
-
-const FLATTEN_FIELD_FORM_FIELDS: Field<FlattenField>[] = [
-  {
-    name: 'name',
-    type: 'string',
-    placeholder: 'column_name',
-    required: true,
-  },
-  {
-    name: 'type',
-    type: 'string',
-    suggestions: ['path', 'jq', 'root'],
-    required: true,
-  },
-  {
-    name: 'expr',
-    type: 'string',
-    placeholder: '$.thing',
-    defined: (flattenField: FlattenField) =>
-      flattenField.type === 'path' || flattenField.type === 'jq',
-    required: true,
-    info: (
-      <>
-        Specify a flatten{' '}
-        <ExternalLink href={`${getLink('DOCS')}/ingestion/flatten-json`}>expression</ExternalLink>.
-      </>
-    ),
-  },
-];
-
-export function getFlattenFieldFormFields() {
-  return FLATTEN_FIELD_FORM_FIELDS;
-}
-
-export interface TransformSpec {
-  transforms?: Transform[];
-  filter?: any;
-}
-
-export interface Transform {
-  type: string;
-  name: string;
-  expression: string;
-}
-
-const TRANSFORM_FORM_FIELDS: Field<Transform>[] = [
-  {
-    name: 'name',
-    type: 'string',
-    placeholder: 'output_name',
-    required: true,
-  },
-  {
-    name: 'type',
-    type: 'string',
-    suggestions: ['expression'],
-    required: true,
-  },
-  {
-    name: 'expression',
-    type: 'string',
-    placeholder: '"foo" + "bar"',
-    required: true,
-    info: (
-      <>
-        A valid Druid{' '}
-        <ExternalLink href={`${getLink('DOCS')}/misc/math-expr.html`}>expression</ExternalLink>.
-      </>
-    ),
-  },
-];
-
-export function getTransformFormFields() {
-  return TRANSFORM_FORM_FIELDS;
-}
-
 export interface GranularitySpec {
   type?: string;
   queryGranularity?: string;
@@ -675,294 +317,6 @@ export interface GranularitySpec {
   intervals?: string | string[];
 }
 
-export interface MetricSpec {
-  type: string;
-  name?: string;
-  fieldName?: string;
-  maxStringBytes?: number;
-  filterNullValues?: boolean;
-  fieldNames?: string[];
-  fnAggregate?: string;
-  fnCombine?: string;
-  fnReset?: string;
-  fields?: string[];
-  byRow?: boolean;
-  round?: boolean;
-  isInputHyperUnique?: boolean;
-  filter?: any;
-  aggregator?: MetricSpec;
-}
-
-const METRIC_SPEC_FORM_FIELDS: Field<MetricSpec>[] = [
-  {
-    name: 'name',
-    type: 'string',
-    info: <>The metric name as it will appear in Druid.</>,
-  },
-  {
-    name: 'type',
-    type: 'string',
-    suggestions: [
-      'count',
-      {
-        group: 'sum',
-        suggestions: ['longSum', 'doubleSum', 'floatSum'],
-      },
-      {
-        group: 'min',
-        suggestions: ['longMin', 'doubleMin', 'floatMin'],
-      },
-      {
-        group: 'max',
-        suggestions: ['longMax', 'doubleMax', 'floatMax'],
-      },
-      {
-        group: 'first',
-        suggestions: ['longFirst', 'doubleFirst', 'floatFirst'],
-      },
-      {
-        group: 'last',
-        suggestions: ['longLast', 'doubleLast', 'floatLast'],
-      },
-      'thetaSketch',
-      {
-        group: 'HLLSketch',
-        suggestions: ['HLLSketchBuild', 'HLLSketchMerge'],
-      },
-      'quantilesDoublesSketch',
-      'momentSketch',
-      'fixedBucketsHistogram',
-      'hyperUnique',
-      'filtered',
-    ],
-    info: <>The aggregation function to apply.</>,
-  },
-  {
-    name: 'fieldName',
-    type: 'string',
-    defined: m => m.type !== 'filtered',
-    info: <>The column name for the aggregator to operate on.</>,
-  },
-  {
-    name: 'maxStringBytes',
-    type: 'number',
-    defaultValue: 1024,
-    defined: m => {
-      return ['stringFirst', 'stringLast'].includes(m.type);
-    },
-  },
-  {
-    name: 'filterNullValues',
-    type: 'boolean',
-    defaultValue: false,
-    defined: m => {
-      return ['stringFirst', 'stringLast'].includes(m.type);
-    },
-  },
-  // filtered
-  {
-    name: 'filter',
-    type: 'json',
-    defined: m => m.type === 'filtered',
-  },
-  {
-    name: 'aggregator',
-    type: 'json',
-    defined: m => m.type === 'filtered',
-  },
-  // thetaSketch
-  {
-    name: 'size',
-    type: 'number',
-    defined: m => m.type === 'thetaSketch',
-    defaultValue: 16384,
-    info: (
-      <>
-        <p>
-          Must be a power of 2. Internally, size refers to the maximum number of entries sketch
-          object will retain. Higher size means higher accuracy but more space to store sketches.
-          Note that after you index with a particular size, druid will persist sketch in segments
-          and you will use size greater or equal to that at query time.
-        </p>
-        <p>
-          See the{' '}
-          <ExternalLink href="https://datasketches.apache.org/docs/Theta/ThetaSize.html">
-            DataSketches site
-          </ExternalLink>{' '}
-          for details.
-        </p>
-        <p>In general, We recommend just sticking to default size.</p>
-      </>
-    ),
-  },
-  {
-    name: 'isInputThetaSketch',
-    type: 'boolean',
-    defined: m => m.type === 'thetaSketch',
-    defaultValue: false,
-    info: (
-      <>
-        This should only be used at indexing time if your input data contains theta sketch objects.
-        This would be the case if you use datasketches library outside of Druid, say with Pig/Hive,
-        to produce the data that you are ingesting into Druid
-      </>
-    ),
-  },
-  // HLLSketchBuild & HLLSketchMerge
-  {
-    name: 'lgK',
-    type: 'number',
-    defined: m => m.type === 'HLLSketchBuild' || m.type === 'HLLSketchMerge',
-    defaultValue: 12,
-    info: (
-      <>
-        <p>
-          log2 of K that is the number of buckets in the sketch, parameter that controls the size
-          and the accuracy.
-        </p>
-        <p>Must be between 4 to 21 inclusively.</p>
-      </>
-    ),
-  },
-  {
-    name: 'tgtHllType',
-    type: 'string',
-    defined: m => m.type === 'HLLSketchBuild' || m.type === 'HLLSketchMerge',
-    defaultValue: 'HLL_4',
-    suggestions: ['HLL_4', 'HLL_6', 'HLL_8'],
-    info: (
-      <>
-        The type of the target HLL sketch. Must be <Code>HLL_4</Code>, <Code>HLL_6</Code>, or{' '}
-        <Code>HLL_8</Code>.
-      </>
-    ),
-  },
-  // quantilesDoublesSketch
-  {
-    name: 'k',
-    type: 'number',
-    defined: m => m.type === 'quantilesDoublesSketch',
-    defaultValue: 128,
-    info: (
-      <>
-        <p>
-          Parameter that determines the accuracy and size of the sketch. Higher k means higher
-          accuracy but more space to store sketches.
-        </p>
-        <p>
-          Must be a power of 2 from 2 to 32768. See the{' '}
-          <ExternalLink href="https://datasketches.apache.org/docs/Quantiles/QuantilesAccuracy.html">
-            Quantiles Accuracy
-          </ExternalLink>{' '}
-          for details.
-        </p>
-      </>
-    ),
-  },
-  // momentSketch
-  {
-    name: 'k',
-    type: 'number',
-    defined: m => m.type === 'momentSketch',
-    required: true,
-    info: (
-      <>
-        Parameter that determines the accuracy and size of the sketch. Higher k means higher
-        accuracy but more space to store sketches. Usable range is generally [3,15]
-      </>
-    ),
-  },
-  {
-    name: 'compress',
-    type: 'boolean',
-    defined: m => m.type === 'momentSketch',
-    defaultValue: true,
-    info: (
-      <>
-        Flag for whether the aggregator compresses numeric values using arcsinh. Can improve
-        robustness to skewed and long-tailed distributions, but reduces accuracy slightly on more
-        uniform distributions.
-      </>
-    ),
-  },
-  // fixedBucketsHistogram
-  {
-    name: 'lowerLimit',
-    type: 'number',
-    defined: m => m.type === 'fixedBucketsHistogram',
-    required: true,
-    info: <>Lower limit of the histogram.</>,
-  },
-  {
-    name: 'upperLimit',
-    type: 'number',
-    defined: m => m.type === 'fixedBucketsHistogram',
-    required: true,
-    info: <>Upper limit of the histogram.</>,
-  },
-  {
-    name: 'numBuckets',
-    type: 'number',
-    defined: m => m.type === 'fixedBucketsHistogram',
-    defaultValue: 10,
-    required: true,
-    info: (
-      <>
-        Number of buckets for the histogram. The range <Code>[lowerLimit, upperLimit]</Code> will be
-        divided into <Code>numBuckets</Code> intervals of equal size.
-      </>
-    ),
-  },
-  {
-    name: 'outlierHandlingMode',
-    type: 'string',
-    defined: m => m.type === 'fixedBucketsHistogram',
-    required: true,
-    suggestions: ['ignore', 'overflow', 'clip'],
-    info: (
-      <>
-        <p>
-          Specifies how values outside of <Code>[lowerLimit, upperLimit]</Code> will be handled.
-        </p>
-        <p>
-          Supported modes are <Code>ignore</Code>, <Code>overflow</Code>, and <Code>clip</Code>. See
-          <ExternalLink
-            href={`${getLink(
-              'DOCS',
-            )}/development/extensions-core/approximate-histograms.html#outlier-handling-modes`}
-          >
-            outlier handling modes
-          </ExternalLink>{' '}
-          for more details.
-        </p>
-      </>
-    ),
-  },
-  // hyperUnique
-  {
-    name: 'isInputHyperUnique',
-    type: 'boolean',
-    defined: m => m.type === 'hyperUnique',
-    defaultValue: false,
-    info: (
-      <>
-        This can be set to true to index precomputed HLL (Base64 encoded output from druid-hll is
-        expected).
-      </>
-    ),
-  },
-];
-
-export function getMetricSpecFormFields() {
-  return METRIC_SPEC_FORM_FIELDS;
-}
-
-export function getMetricSpecName(metricSpec: MetricSpec): string {
-  return (
-    metricSpec.name || (metricSpec.aggregator ? getMetricSpecName(metricSpec.aggregator) : '?')
-  );
-}
-
 // --------------
 
 export interface IoConfig {
@@ -990,29 +344,6 @@ export function invalidIoConfig(ioConfig: IoConfig): boolean {
   );
 }
 
-export interface InputSource {
-  type: string;
-  baseDir?: string;
-  filter?: any;
-  uris?: string[];
-  prefixes?: string[];
-  objects?: { bucket: string; path: string }[];
-  fetchTimeout?: number;
-
-  // druid
-  dataSource?: string;
-  interval?: string;
-  dimensions?: string[];
-  metrics?: string[];
-  maxInputSegmentBytesPerTask?: number;
-
-  // inline
-  data?: string;
-
-  // hdfs
-  paths?: string;
-}
-
 export function getIoConfigFormFields(ingestionComboType: IngestionComboType): Field<IoConfig>[] {
   const inputSourceType: Field<IoConfig> = {
     name: 'inputSource.type',
@@ -1022,7 +353,7 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
     info: (
       <p>
         Druid connects to raw data through{' '}
-        <ExternalLink href={`${getLink('DOCS')}/ingestion/firehose.html`}>
+        <ExternalLink href={`${getLink('DOCS')}/ingestion/native-batch.html#input-sources`}>
           inputSources
         </ExternalLink>
         . You can change your selected inputSource here.
@@ -1075,7 +406,7 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
           required: true,
           info: (
             <>
-              <ExternalLink href={`${getLink('DOCS')}/ingestion/firehose.html#localfirehose`}>
+              <ExternalLink href={`${getLink('DOCS')}/ingestion/native-batch.html#input-sources`}>
                 inputSource.baseDir
               </ExternalLink>
               <p>Specifies the directory to search recursively for files to be ingested.</p>
@@ -1099,7 +430,9 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
           ],
           info: (
             <>
-              <ExternalLink href={`${getLink('DOCS')}/ingestion/firehose.html#localfirehose`}>
+              <ExternalLink
+                href={`${getLink('DOCS')}/ingestion/native-batch.html#local-input-source`}
+              >
                 inputSource.filter
               </ExternalLink>
               <p>
@@ -1588,55 +921,6 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
   throw new Error(`unknown input type ${ingestionComboType}`);
 }
 
-function nonEmptyArray(a: any) {
-  return Array.isArray(a) && Boolean(a.length);
-}
-
-function issueWithInputSource(inputSource: InputSource | undefined): string | undefined {
-  if (!inputSource) return 'does not exist';
-  if (!inputSource.type) return 'missing a type';
-  switch (inputSource.type) {
-    case 'local':
-      if (!inputSource.baseDir) return `must have a 'baseDir'`;
-      if (!inputSource.filter) return `must have a 'filter'`;
-      break;
-
-    case 'http':
-      if (!nonEmptyArray(inputSource.uris)) {
-        return 'must have at least one uri';
-      }
-      break;
-
-    case 'druid':
-      if (!inputSource.dataSource) return `must have a 'dataSource'`;
-      if (!inputSource.interval) return `must have an 'interval'`;
-      break;
-
-    case 'inline':
-      if (!inputSource.data) return `must have 'data'`;
-      break;
-
-    case 's3':
-    case 'azure':
-    case 'google':
-      if (
-        !nonEmptyArray(inputSource.uris) &&
-        !nonEmptyArray(inputSource.prefixes) &&
-        !nonEmptyArray(inputSource.objects)
-      ) {
-        return 'must have at least one uri or prefix or object';
-      }
-      break;
-
-    case 'hdfs':
-      if (!inputSource.paths) {
-        return 'must have paths';
-      }
-      break;
-  }
-  return;
-}
-
 export function issueWithIoConfig(
   ioConfig: IoConfig | undefined,
   ignoreInputFormat = false,
@@ -2096,30 +1380,29 @@ export function adjustTuningConfig(tuningConfig: TuningConfig) {
   const tuningConfigType = deepGet(tuningConfig, 'type');
   if (tuningConfigType !== 'index_parallel') return tuningConfig;
 
-  const partitionsSpecType = deepGet(tuningConfig, 'partitionsSpec.type');
-  if (tuningConfig.forceGuaranteedRollup) {
-    if (partitionsSpecType !== 'hashed' && partitionsSpecType !== 'single_dim') {
-      tuningConfig = deepSet(tuningConfig, 'partitionsSpec', { type: 'hashed' });
-    }
-  } else {
-    if (partitionsSpecType !== 'dynamic') {
-      tuningConfig = deepSet(tuningConfig, 'partitionsSpec', { type: 'dynamic' });
-    }
+  const partitionsSpecType = deepGet(tuningConfig, 'partitionsSpec.type') || 'dynamic';
+  if (partitionsSpecType === 'dynamic') {
+    tuningConfig = deepDelete(tuningConfig, 'forceGuaranteedRollup');
+  } else if (oneOf(partitionsSpecType, 'hashed', 'single_dim')) {
+    tuningConfig = deepSet(tuningConfig, 'forceGuaranteedRollup', true);
   }
+
   return tuningConfig;
 }
 
 export function invalidTuningConfig(tuningConfig: TuningConfig, intervals: any): boolean {
-  if (tuningConfig.type !== 'index_parallel' || !tuningConfig.forceGuaranteedRollup) return false;
+  if (tuningConfig.type !== 'index_parallel') return false;
 
-  if (!intervals) return true;
   switch (deepGet(tuningConfig, 'partitionsSpec.type')) {
     case 'hashed':
+      if (!intervals) return true;
       return (
         Boolean(deepGet(tuningConfig, 'partitionsSpec.targetRowsPerSegment')) &&
         Boolean(deepGet(tuningConfig, 'partitionsSpec.numShards'))
       );
+
     case 'single_dim':
+      if (!intervals) return true;
       if (!deepGet(tuningConfig, 'partitionsSpec.partitionDimension')) return true;
       const hasTargetRowsPerSegment = Boolean(
         deepGet(tuningConfig, 'partitionsSpec.targetRowsPerSegment'),
@@ -2142,24 +1425,11 @@ export function getPartitionRelatedTuningSpecFormFields(
     case 'index_parallel':
       return [
         {
-          name: 'forceGuaranteedRollup',
-          type: 'boolean',
-          defaultValue: false,
-          info: (
-            <p>
-              Forces guaranteeing the perfect rollup. The perfect rollup optimizes the total size of
-              generated segments and querying time while indexing time will be increased. If this is
-              set to true, the index task will read the entire input data twice: one for finding the
-              optimal number of partitions per time chunk and one for generating segments.
-            </p>
-          ),
-        },
-        {
           name: 'partitionsSpec.type',
           label: 'Partitioning type',
           type: 'string',
-          suggestions: (t: TuningConfig) =>
-            t.forceGuaranteedRollup ? ['hashed', 'single_dim'] : ['dynamic'],
+          required: true,
+          suggestions: ['dynamic', 'hashed', 'single_dim'],
           info: (
             <p>
               For perfect rollup, you should use either <Code>hashed</Code> (partitioning based on
@@ -2355,17 +1625,26 @@ const TUNING_CONFIG_FORM_FIELDS: Field<TuningConfig>[] = [
     info: <>Used in determining when intermediate persists to disk should occur.</>,
   },
   {
-    name: 'maxNumMergeTasks',
+    name: 'totalNumMergeTasks',
     type: 'number',
     defaultValue: 10,
-    defined: (t: TuningConfig) => Boolean(t.type === 'index_parallel' && t.forceGuaranteedRollup),
+    min: 1,
+    defined: (t: TuningConfig) =>
+      Boolean(
+        t.type === 'index_parallel' &&
+          oneOf(deepGet(t, 'tuningConfig.partitionsSpec.type'), 'hashed', 'single_dim'),
+      ),
     info: <>Number of tasks to merge partial segments after shuffle.</>,
   },
   {
     name: 'maxNumSegmentsToMerge',
     type: 'number',
     defaultValue: 100,
-    defined: (t: TuningConfig) => Boolean(t.type === 'index_parallel' && t.forceGuaranteedRollup),
+    defined: (t: TuningConfig) =>
+      Boolean(
+        t.type === 'index_parallel' &&
+          oneOf(deepGet(t, 'tuningConfig.partitionsSpec.type'), 'hashed', 'single_dim'),
+      ),
     info: (
       <>
         Max limit for the number of segments a single task can merge at the same time after shuffle.
@@ -2376,7 +1655,7 @@ const TUNING_CONFIG_FORM_FIELDS: Field<TuningConfig>[] = [
     name: 'resetOffsetAutomatically',
     type: 'boolean',
     defaultValue: false,
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: (
       <>
         Whether to reset the consumer offset if the next offset that it is trying to fetch is less
@@ -2388,14 +1667,14 @@ const TUNING_CONFIG_FORM_FIELDS: Field<TuningConfig>[] = [
     name: 'intermediatePersistPeriod',
     type: 'duration',
     defaultValue: 'PT10M',
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: <>The period that determines the rate at which intermediate persists occur.</>,
   },
   {
     name: 'intermediateHandoffPeriod',
     type: 'duration',
     defaultValue: 'P2147483647D',
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: (
       <>
         How often the tasks should hand off segments. Handoff will happen either if
@@ -2429,7 +1708,7 @@ const TUNING_CONFIG_FORM_FIELDS: Field<TuningConfig>[] = [
     name: 'handoffConditionTimeout',
     type: 'number',
     defaultValue: 0,
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: <>Milliseconds to wait for segment handoff. 0 means to wait forever.</>,
   },
   {
@@ -2489,7 +1768,7 @@ const TUNING_CONFIG_FORM_FIELDS: Field<TuningConfig>[] = [
     name: 'workerThreads',
     type: 'number',
     placeholder: 'min(10, taskCount)',
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: (
       <>The number of threads that will be used by the supervisor for asynchronous operations.</>
     ),
@@ -2498,14 +1777,14 @@ const TUNING_CONFIG_FORM_FIELDS: Field<TuningConfig>[] = [
     name: 'chatThreads',
     type: 'number',
     placeholder: 'min(10, taskCount * replicas)',
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: <>The number of threads that will be used for communicating with indexing tasks.</>,
   },
   {
     name: 'chatRetries',
     type: 'number',
     defaultValue: 8,
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: (
       <>
         The number of times HTTP requests to indexing tasks will be retried before considering tasks
@@ -2517,14 +1796,14 @@ const TUNING_CONFIG_FORM_FIELDS: Field<TuningConfig>[] = [
     name: 'httpTimeout',
     type: 'duration',
     defaultValue: 'PT10S',
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: <>How long to wait for a HTTP response from an indexing task.</>,
   },
   {
     name: 'shutdownTimeout',
     type: 'duration',
     defaultValue: 'PT80S',
-    defined: (t: TuningConfig) => t.type === 'kafka' || t.type === 'kinesis',
+    defined: (t: TuningConfig) => oneOf(t.type, 'kafka', 'kinesis'),
     info: (
       <>
         How long to wait for the supervisor to attempt a graceful shutdown of tasks before exiting.
@@ -2676,7 +1955,7 @@ export function updateIngestionType(
   }
 
   if (!deepGet(spec, 'spec.dataSchema.timestampSpec')) {
-    newSpec = deepSet(newSpec, 'spec.dataSchema.timestampSpec', getDummyTimestampSpec());
+    newSpec = deepSet(newSpec, 'spec.dataSchema.timestampSpec', PLACEHOLDER_TIMESTAMP_SPEC);
   }
 
   if (!deepGet(spec, 'spec.dataSchema.dimensionsSpec')) {
@@ -2744,111 +2023,91 @@ function inputFormatFromType(type: string, findColumnsFromHeader?: boolean): Inp
   return inputFormat;
 }
 
-export type DruidFilter = Record<string, any>;
+// ------------------------
 
-export interface DimensionFiltersWithRest {
-  dimensionFilters: DruidFilter[];
-  restFilter?: DruidFilter;
+export function guessTypeFromSample(sample: any[]): string {
+  const definedValues = sample.filter(v => v != null);
+  if (
+    definedValues.length &&
+    definedValues.every(v => !isNaN(v) && oneOf(typeof v, 'number', 'string'))
+  ) {
+    if (definedValues.every(v => v % 1 === 0)) {
+      return 'long';
+    } else {
+      return 'double';
+    }
+  } else {
+    return 'string';
+  }
 }
 
-export function splitFilter(filter: DruidFilter | null): DimensionFiltersWithRest {
-  const inputAndFilters: DruidFilter[] = filter
-    ? filter.type === 'and' && Array.isArray(filter.fields)
-      ? filter.fields
-      : [filter]
-    : EMPTY_ARRAY;
-  const dimensionFilters: DruidFilter[] = inputAndFilters.filter(
-    f => typeof f.dimension === 'string',
+export function getColumnTypeFromHeaderAndRows(
+  headerAndRows: HeaderAndRows,
+  column: string,
+): string {
+  return guessTypeFromSample(
+    filterMap(headerAndRows.rows, (r: any) => (r.parsed ? r.parsed[column] : undefined)),
   );
-  const restFilters: DruidFilter[] = inputAndFilters.filter(f => typeof f.dimension !== 'string');
-
-  return {
-    dimensionFilters,
-    restFilter: restFilters.length
-      ? restFilters.length > 1
-        ? { type: 'and', filters: restFilters }
-        : restFilters[0]
-      : undefined,
-  };
 }
 
-export function joinFilter(
-  dimensionFiltersWithRest: DimensionFiltersWithRest,
-): DruidFilter | undefined {
-  const { dimensionFilters, restFilter } = dimensionFiltersWithRest;
-  let newFields = dimensionFilters || EMPTY_ARRAY;
-  if (restFilter && restFilter.type) newFields = newFields.concat([restFilter]);
+function getTypeHintsFromSpec(spec: IngestionSpec): Record<string, string> {
+  const typeHints: Record<string, string> = {};
+  const currentDimensions = deepGet(spec, 'spec.dataSchema.dimensionsSpec.dimensions') || [];
+  for (const currentDimension of currentDimensions) {
+    typeHints[getDimensionSpecName(currentDimension)] = getDimensionSpecType(currentDimension);
+  }
+
+  const currentMetrics = deepGet(spec, 'spec.dataSchema.metricsSpec') || [];
+  for (const currentMetric of currentMetrics) {
+    const singleFieldName = getMetricSpecSingleFieldName(currentMetric);
+    const metricOutputType = getMetricSpecOutputType(currentMetric);
+    if (singleFieldName && metricOutputType) {
+      typeHints[singleFieldName] = metricOutputType;
+    }
+  }
 
-  if (!newFields.length) return;
-  if (newFields.length === 1) return newFields[0];
-  return { type: 'and', fields: newFields };
+  return typeHints;
 }
 
-const FILTER_FORM_FIELDS: Field<DruidFilter>[] = [
-  {
-    name: 'type',
-    type: 'string',
-    suggestions: ['selector', 'in', 'regex', 'like', 'not'],
-  },
-  {
-    name: 'dimension',
-    type: 'string',
-    defined: (df: DruidFilter) => ['selector', 'in', 'regex', 'like'].includes(df.type),
-  },
-  {
-    name: 'value',
-    type: 'string',
-    defined: (df: DruidFilter) => df.type === 'selector',
-  },
-  {
-    name: 'values',
-    type: 'string-array',
-    defined: (df: DruidFilter) => df.type === 'in',
-  },
-  {
-    name: 'pattern',
-    type: 'string',
-    defined: (df: DruidFilter) => ['regex', 'like'].includes(df.type),
-  },
+export function updateSchemaWithSample(
+  spec: IngestionSpec,
+  headerAndRows: HeaderAndRows,
+  dimensionMode: DimensionMode,
+  rollup: boolean,
+): IngestionSpec {
+  const typeHints = getTypeHintsFromSpec(spec);
 
-  {
-    name: 'field.type',
-    label: 'Sub-filter type',
-    type: 'string',
-    suggestions: ['selector', 'in', 'regex', 'like'],
-    defined: (df: DruidFilter) => df.type === 'not',
-  },
-  {
-    name: 'field.dimension',
-    label: 'Sub-filter dimension',
-    type: 'string',
-    defined: (df: DruidFilter) => df.type === 'not',
-  },
-  {
-    name: 'field.value',
-    label: 'Sub-filter value',
-    type: 'string',
-    defined: (df: DruidFilter) => df.type === 'not' && deepGet(df, 'field.type') === 'selector',
-  },
-  {
-    name: 'field.values',
-    label: 'Sub-filter values',
-    type: 'string-array',
-    defined: (df: DruidFilter) => df.type === 'not' && deepGet(df, 'field.type') === 'in',
-  },
-  {
-    name: 'field.pattern',
-    label: 'Sub-filter pattern',
-    type: 'string',
-    defined: (df: DruidFilter) =>
-      df.type === 'not' && ['regex', 'like'].includes(deepGet(df, 'field.type')),
-  },
-];
+  let newSpec = spec;
 
-export function getFilterFormFields() {
-  return FILTER_FORM_FIELDS;
+  if (dimensionMode === 'auto-detect') {
+    newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec.dimensions', []);
+  } else {
+    newSpec = deepDelete(newSpec, 'spec.dataSchema.dimensionsSpec.dimensionExclusions');
+
+    const dimensions = getDimensionSpecs(headerAndRows, typeHints, rollup);
+    if (dimensions) {
+      newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec.dimensions', dimensions);
+    }
+  }
+
+  if (rollup) {
+    newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.queryGranularity', 'HOUR');
+
+    const metrics = getMetricSpecs(headerAndRows, typeHints);
+    if (metrics) {
+      newSpec = deepSet(newSpec, 'spec.dataSchema.metricsSpec', metrics);
+    }
+  } else {
+    newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.queryGranularity', 'NONE');
+    newSpec = deepDelete(newSpec, 'spec.dataSchema.metricsSpec');
+  }
+
+  newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.rollup', rollup);
+  return newSpec;
 }
 
+// ------------------------
+
 export function upgradeSpec(spec: any): any {
   if (deepGet(spec, 'spec.ioConfig.firehose')) {
     switch (deepGet(spec, 'spec.ioConfig.firehose.type')) {
diff --git a/web-console/src/druid-models/input-format.tsx b/web-console/src/druid-models/input-format.tsx
new file mode 100644
index 0000000..15cb682
--- /dev/null
+++ b/web-console/src/druid-models/input-format.tsx
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import React from 'react';
+
+import { AutoForm, ExternalLink, Field } from '../components';
+import { getLink } from '../links';
+import { oneOf } from '../utils';
+
+import { FlattenSpec } from './flatten-spec';
+
+export interface InputFormat {
+  type: string;
+  findColumnsFromHeader?: boolean;
+  skipHeaderRows?: number;
+  columns?: string[];
+  listDelimiter?: string;
+  pattern?: string;
+  function?: string;
+  flattenSpec?: FlattenSpec;
+  keepNullColumns?: boolean;
+}
+
+export const INPUT_FORMAT_FIELDS: Field<InputFormat>[] = [
+  {
+    name: 'type',
+    label: 'Input format',
+    type: 'string',
+    suggestions: ['json', 'csv', 'tsv', 'regex', 'parquet', 'orc', 'avro_ocf'],
+    required: true,
+    info: (
+      <>
+        <p>The parser used to parse the data.</p>
+        <p>
+          For more information see{' '}
+          <ExternalLink href={`${getLink('DOCS')}/ingestion/data-formats.html`}>
+            the documentation
+          </ExternalLink>
+          .
+        </p>
+      </>
+    ),
+  },
+  {
+    name: 'pattern',
+    type: 'string',
+    required: true,
+    defined: (p: InputFormat) => p.type === 'regex',
+  },
+  {
+    name: 'function',
+    type: 'string',
+    required: true,
+    defined: (p: InputFormat) => p.type === 'javascript',
+  },
+  {
+    name: 'findColumnsFromHeader',
+    type: 'boolean',
+    required: true,
+    defined: (p: InputFormat) => oneOf(p.type, 'csv', 'tsv'),
+  },
+  {
+    name: 'skipHeaderRows',
+    type: 'number',
+    defaultValue: 0,
+    defined: (p: InputFormat) => oneOf(p.type, 'csv', 'tsv'),
+    min: 0,
+    info: (
+      <>
+        If both skipHeaderRows and hasHeaderRow options are set, skipHeaderRows is first applied.
+        For example, if you set skipHeaderRows to 2 and hasHeaderRow to true, Druid will skip the
+        first two lines and then extract column information from the third line.
+      </>
+    ),
+  },
+  {
+    name: 'columns',
+    type: 'string-array',
+    required: true,
+    defined: (p: InputFormat) =>
+      (oneOf(p.type, 'csv', 'tsv') && !p.findColumnsFromHeader) || p.type === 'regex',
+  },
+  {
+    name: 'delimiter',
+    type: 'string',
+    defaultValue: '\t',
+    defined: (p: InputFormat) => p.type === 'tsv',
+    info: <>A custom delimiter for data values.</>,
+  },
+  {
+    name: 'listDelimiter',
+    type: 'string',
+    defined: (p: InputFormat) => oneOf(p.type, 'csv', 'tsv', 'regex'),
+    info: <>A custom delimiter for multi-value dimensions.</>,
+  },
+  {
+    name: 'binaryAsString',
+    type: 'boolean',
+    defaultValue: false,
+    defined: (p: InputFormat) => oneOf(p.type, 'parquet', 'orc', 'avro_ocf'),
+    info: (
+      <>
+        Specifies if the binary column which is not logically marked as a string should be treated
+        as a UTF-8 encoded string.
+      </>
+    ),
+  },
+];
+
+export function issueWithInputFormat(inputFormat: InputFormat | undefined): string | undefined {
+  return AutoForm.issueWithModel(inputFormat, INPUT_FORMAT_FIELDS);
+}
+
+export function inputFormatCanFlatten(inputFormat: InputFormat): boolean {
+  return oneOf(inputFormat.type, 'json', 'parquet', 'orc', 'avro_ocf');
+}
diff --git a/web-console/src/druid-models/input-source.tsx b/web-console/src/druid-models/input-source.tsx
new file mode 100644
index 0000000..8c4302e
--- /dev/null
+++ b/web-console/src/druid-models/input-source.tsx
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+function nonEmptyArray(a: any) {
+  return Array.isArray(a) && Boolean(a.length);
+}
+
+export interface InputSource {
+  type: string;
+  baseDir?: string;
+  filter?: any;
+  uris?: string[];
+  prefixes?: string[];
+  objects?: { bucket: string; path: string }[];
+  fetchTimeout?: number;
+
+  // druid
+  dataSource?: string;
+  interval?: string;
+  dimensions?: string[];
+  metrics?: string[];
+  maxInputSegmentBytesPerTask?: number;
+
+  // inline
+  data?: string;
+
+  // hdfs
+  paths?: string;
+}
+
+export function issueWithInputSource(inputSource: InputSource | undefined): string | undefined {
+  if (!inputSource) return 'does not exist';
+  if (!inputSource.type) return 'missing a type';
+  switch (inputSource.type) {
+    case 'local':
+      if (!inputSource.baseDir) return `must have a 'baseDir'`;
+      if (!inputSource.filter) return `must have a 'filter'`;
+      break;
+
+    case 'http':
+      if (!nonEmptyArray(inputSource.uris)) {
+        return 'must have at least one uri';
+      }
+      break;
+
+    case 'druid':
+      if (!inputSource.dataSource) return `must have a 'dataSource'`;
+      if (!inputSource.interval) return `must have an 'interval'`;
+      break;
+
+    case 'inline':
+      if (!inputSource.data) return `must have 'data'`;
+      break;
+
+    case 's3':
+    case 'azure':
+    case 'google':
+      if (
+        !nonEmptyArray(inputSource.uris) &&
+        !nonEmptyArray(inputSource.prefixes) &&
+        !nonEmptyArray(inputSource.objects)
+      ) {
+        return 'must have at least one uri or prefix or object';
+      }
+      break;
+
+    case 'hdfs':
+      if (!inputSource.paths) {
+        return 'must have paths';
+      }
+      break;
+  }
+  return;
+}
diff --git a/web-console/src/druid-models/lookup-spec.spec.ts b/web-console/src/druid-models/lookup-spec.spec.ts
new file mode 100644
index 0000000..0978f7c
--- /dev/null
+++ b/web-console/src/druid-models/lookup-spec.spec.ts
@@ -0,0 +1,453 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { isLookupInvalid } from './lookup-spec';
+
+describe('lookup-spec', () => {
+  describe('Type Map Should be disabled', () => {
+    it('Missing LookupName', () => {
+      expect(isLookupInvalid(undefined, 'v1', '__default', { type: '' })).toBe(true);
+    });
+
+    it('Empty version', () => {
+      expect(isLookupInvalid('lookup', '', '__default', { type: '' })).toBe(true);
+    });
+
+    it('Missing version', () => {
+      expect(isLookupInvalid('lookup', undefined, '__default', { type: '' })).toBe(true);
+    });
+
+    it('Empty tier', () => {
+      expect(isLookupInvalid('lookup', 'v1', '', { type: '' })).toBe(true);
+    });
+
+    it('Missing tier', () => {
+      expect(isLookupInvalid('lookup', 'v1', undefined, { type: '' })).toBe(true);
+    });
+
+    it('Missing spec', () => {
+      expect(isLookupInvalid('lookup', 'v1', '__default', {})).toBe(true);
+    });
+
+    it('Type undefined', () => {
+      expect(isLookupInvalid('lookup', 'v1', '__default', { type: undefined })).toBe(true);
+    });
+
+    it('Lookup of type map with no map', () => {
+      expect(isLookupInvalid('lookup', 'v1', '__default', { type: 'map' })).toBe(true);
+    });
+
+    it('Lookup of type cachedNamespace with no extractionNamespace', () => {
+      expect(isLookupInvalid('lookup', 'v1', '__default', { type: 'cachedNamespace' })).toBe(true);
+    });
+
+    it('Lookup of type cachedNamespace with extractionNamespace type uri, format csv, no namespaceParseSpec', () => {
+      expect(
+        isLookupInvalid('lookup', 'v1', '__default', {
+          type: 'cachedNamespace',
+          extractionNamespace: {
+            type: 'uri',
+            uriPrefix: 's3://bucket/some/key/prefix/',
+            fileRegex: 'renames-[0-9]*\\.gz',
+            pollPeriod: 'PT5M',
+          },
+        }),
+      ).toBe(true);
+    });
+
+    it('Lookup of type cachedNamespace with extractionNamespace type uri, format csv, no columns and no hasHeaderRow', () => {
+      expect(
+        isLookupInvalid('lookup', 'v1', '__default', {
+          type: 'cachedNamespace',
+          extractionNamespace: {
+            type: 'uri',
+            uriPrefix: 's3://bucket/some/key/prefix/',
+            fileRegex: 'renames-[0-9]*\\.gz',
+            namespaceParseSpec: {
+              format: 'csv',
+            },
+            pollPeriod: 'PT5M',
+          },
+        }),
+      ).toBe(true);
+    });
+
+    it('Lookup of type cachedNamespace with extractionNamespace type uri, format tsv, no columns', () => {
+      expect(
+        isLookupInvalid('lookup', 'v1', '__default', {
+          type: 'cachedNamespace',
+          extractionNamespace: {
+            type: 'uri',
+            uriPrefix: 's3://bucket/some/key/prefix/',
+            fileRegex: 'renames-[0-9]*\\.gz',
+            namespaceParseSpec: {
+              format: 'tsv',
+              skipHeaderRows: 0,
+            },
+            pollPeriod: 'PT5M',
+          },
+        }),
+      ).toBe(true);
+    });
+
+    it('Lookup of type cachedNamespace with extractionNamespace type uri, format customJson, no keyFieldName', () => {
+      expect(
+        isLookupInvalid('lookup', 'v1', '__default', {
+          type: 'cachedNamespace',
+          extractionNamespace: {
+            type: 'uri',
+            uriPrefix: 's3://bucket/some/key/prefix/',
+            fileRegex: 'renames-[0-9]*\\.gz',
+            namespaceParseSpec: {
+              format: 'customJson',
+              valueFieldName: 'value',
+            },
+            pollPeriod: 'PT5M',
+          },
+        }),
+      ).toBe(true);
+    });
+
+    it('Lookup of type cachedNamespace with extractionNamespace type uri, format customJson, no valueFieldName', () => {
+      expect(
+        isLookupInvalid('lookup', 'v1', '__default', {
+          type: 'cachedNamespace',
+          extractionNamespace: {
+            type: 'uri',
+            uriPrefix: 's3://bucket/some/key/prefix/',
+            fileRegex: 'renames-[0-9]*\\.gz',
+            namespaceParseSpec: {
+              format: 'customJson',
+              keyFieldName: 'key',
+            },
+            pollPeriod: 'PT5M',
+          },
+        }),
+      ).toBe(true);
+    });
+  });
+
+  describe('Type cachedNamespace should be disabled', () => {
+    it('No extractionNamespace', () => {
+      expect(isLookupInvalid('lookup', 'v1', '__default', { type: 'cachedNamespace' })).toBe(true);
+    });
+
+    describe('ExtractionNamespace type URI', () => {
+      it('Format csv, no namespaceParseSpec', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              pollPeriod: 'PT5M',
+            },
+          }),
+        ).toBe(true);
+      });
+
+      it('Format csv, no columns and skipHeaderRows', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              namespaceParseSpec: {
+                format: 'csv',
+              },
+              pollPeriod: 'PT5M',
+            },
+          }),
+        ).toBe(true);
+      });
+
+      it('Format tsv, no columns', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              namespaceParseSpec: {
+                format: 'tsv',
+                skipHeaderRows: 0,
+              },
+              pollPeriod: 'PT5M',
+            },
+          }),
+        ).toBe(true);
+      });
+
+      it('Format customJson, no keyFieldName', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              namespaceParseSpec: {
+                format: 'customJson',
+                valueFieldName: 'value',
+              },
+              pollPeriod: 'PT5M',
+            },
+          }),
+        ).toBe(true);
+      });
+
+      it('Format customJson, no valueFieldName', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              namespaceParseSpec: {
+                format: 'customJson',
+                keyFieldName: 'key',
+              },
+              pollPeriod: 'PT5M',
+            },
+          }),
+        ).toBe(true);
+      });
+    });
+
+    describe('ExtractionNamespace type JDBC', () => {
+      it('No namespace', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'jdbc',
+              namespace: undefined,
+              connectorConfig: {
+                createTables: true,
+                connectURI: 'jdbc:mysql://localhost:3306/druid',
+                user: 'druid',
+                password: 'diurd',
+              },
+              table: 'some_lookup_table',
+              keyColumn: 'the_old_dim_value',
+              valueColumn: 'the_new_dim_value',
+              tsColumn: 'timestamp_column',
+              pollPeriod: 600000,
+            },
+          }),
+        ).toBe(true);
+      });
+
+      it('No connectorConfig', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'jdbc',
+              namespace: 'some_lookup',
+              connectorConfig: undefined,
+              table: 'some_lookup_table',
+              keyColumn: 'the_old_dim_value',
+              valueColumn: 'the_new_dim_value',
+              tsColumn: 'timestamp_column',
+              pollPeriod: 600000,
+            },
+          }),
+        ).toBe(true);
+      });
+
+      it('No table', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'jdbc',
+              namespace: 'some_lookup',
+              connectorConfig: {
+                createTables: true,
+                connectURI: 'jdbc:mysql://localhost:3306/druid',
+                user: 'druid',
+                password: 'diurd',
+              },
+              table: undefined,
+              keyColumn: 'the_old_dim_value',
+              valueColumn: 'the_new_dim_value',
+              tsColumn: 'timestamp_column',
+              pollPeriod: 600000,
+            },
+          }),
+        ).toBe(true);
+      });
+
+      it('No keyColumn', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'jdbc',
+              namespace: 'some_lookup',
+              connectorConfig: {
+                createTables: true,
+                connectURI: 'jdbc:mysql://localhost:3306/druid',
+                user: 'druid',
+                password: 'diurd',
+              },
+              table: 'some_lookup_table',
+              keyColumn: undefined,
+              valueColumn: 'the_new_dim_value',
+              tsColumn: 'timestamp_column',
+              pollPeriod: 600000,
+            },
+          }),
+        ).toBe(true);
+      });
+
+      it('No valueColumn', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'jdbc',
+              namespace: 'some_lookup',
+              connectorConfig: {
+                createTables: true,
+                connectURI: 'jdbc:mysql://localhost:3306/druid',
+                user: 'druid',
+                password: 'diurd',
+              },
+              table: 'some_lookup_table',
+              keyColumn: 'the_old_dim_value',
+              valueColumn: undefined,
+              tsColumn: 'timestamp_column',
+              pollPeriod: 600000,
+            },
+          }),
+        ).toBe(true);
+      });
+    });
+  });
+
+  describe('Type Map Should be enabled', () => {
+    it('Has type and has Map', () => {
+      expect(isLookupInvalid('lookup', 'v1', '__default', { type: 'map', map: { a: 'b' } })).toBe(
+        false,
+      );
+    });
+  });
+
+  describe('Type cachedNamespace Should be enabled', () => {
+    describe('ExtractionNamespace type URI', () => {
+      it('Format csv with columns', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              namespaceParseSpec: {
+                format: 'csv',
+                columns: ['key', 'value'],
+              },
+            },
+          }),
+        ).toBe(false);
+      });
+
+      it('Format csv with hasHeaderRow', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              namespaceParseSpec: {
+                format: 'csv',
+                hasHeaderRow: true,
+              },
+            },
+          }),
+        ).toBe(false);
+      });
+
+      it('Format tsv, only columns', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              namespaceParseSpec: {
+                format: 'tsv',
+                columns: ['key', 'value'],
+              },
+            },
+          }),
+        ).toBe(false);
+      });
+
+      it('Format customJson, keyFieldName and valueFieldName', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'uri',
+              uriPrefix: 's3://bucket/some/key/prefix/',
+              fileRegex: 'renames-[0-9]*\\.gz',
+              namespaceParseSpec: {
+                format: 'customJson',
+                valueFieldName: 'value',
+                keyFieldName: 'value',
+              },
+            },
+          }),
+        ).toBe(false);
+      });
+    });
+
+    describe('ExtractionNamespace type JDBC', () => {
+      it('Full valid JDBC spec', () => {
+        expect(
+          isLookupInvalid('lookup', 'v1', '__default', {
+            type: 'cachedNamespace',
+            extractionNamespace: {
+              type: 'jdbc',
+              namespace: 'lookup',
+              connectorConfig: {
+                createTables: true,
+                connectURI: 'jdbc:mysql://localhost:3306/druid',
+                user: 'druid',
+                password: 'diurd',
+              },
+              table: 'some_lookup_table',
+              keyColumn: 'the_old_dim_value',
+              valueColumn: 'the_new_dim_value',
+            },
+          }),
+        ).toBe(false);
+      });
+    });
+  });
+});
diff --git a/web-console/src/druid-models/lookup-spec.tsx b/web-console/src/druid-models/lookup-spec.tsx
new file mode 100644
index 0000000..c9e0e5a
--- /dev/null
+++ b/web-console/src/druid-models/lookup-spec.tsx
@@ -0,0 +1,456 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { Code } from '@blueprintjs/core';
+import React from 'react';
+
+import { AutoForm, Field } from '../components';
+import { deepGet, deepSet, oneOf } from '../utils';
+
+export interface ExtractionNamespaceSpec {
+  type?: string;
+  uri?: string;
+  uriPrefix?: string;
+  fileRegex?: string;
+  namespaceParseSpec?: NamespaceParseSpec;
+  namespace?: string;
+  connectorConfig?: {
+    createTables: boolean;
+    connectURI: string;
+    user: string;
+    password: string;
+  };
+  table?: string;
+  keyColumn?: string;
+  valueColumn?: string;
+  filter?: any;
+  tsColumn?: string;
+  pollPeriod?: number | string;
+}
+
+export interface NamespaceParseSpec {
+  format: string;
+  columns?: string[];
+  keyColumn?: string;
+  valueColumn?: string;
+  hasHeaderRow?: boolean;
+  skipHeaderRows?: number;
+  keyFieldName?: string;
+  valueFieldName?: string;
+  delimiter?: string;
+  listDelimiter?: string;
+}
+
+export interface LookupSpec {
+  type?: string;
+  map?: Record<string, string | number>;
+  extractionNamespace?: ExtractionNamespaceSpec;
+  firstCacheTimeout?: number;
+  injective?: boolean;
+}
+
+export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
+  {
+    name: 'type',
+    type: 'string',
+    suggestions: ['map', 'cachedNamespace'],
+    required: true,
+    adjustment: (model: LookupSpec) => {
+      if (model.type === 'map' && !model.map) {
+        return deepSet(model, 'map', {});
+      }
+      if (model.type === 'cachedNamespace' && !deepGet(model, 'extractionNamespace.type')) {
+        return deepSet(model, 'extractionNamespace', { type: 'uri' });
+      }
+      return model;
+    },
+  },
+
+  // map lookups are simple
+  {
+    name: 'map',
+    type: 'json',
+    height: '60vh',
+    defined: (model: LookupSpec) => model.type === 'map',
+    required: true,
+    issueWithValue: value => {
+      if (!value) return 'map must be defined';
+      if (typeof value !== 'object') return `map must be an object`;
+      for (const k in value) {
+        const typeValue = typeof value[k];
+        if (typeValue !== 'string' && typeValue !== 'number') {
+          return `map key '${k}' is of the wrong type '${typeValue}'`;
+        }
+      }
+      return;
+    },
+  },
+
+  // cachedNamespace lookups have more options
+  {
+    name: 'extractionNamespace.type',
+    type: 'string',
+    label: 'Globally cached lookup type',
+    placeholder: 'uri',
+    suggestions: ['uri', 'jdbc'],
+    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
+    required: true,
+  },
+  {
+    name: 'extractionNamespace.uriPrefix',
+    type: 'string',
+    label: 'URI prefix',
+    placeholder: 's3://bucket/some/key/prefix/',
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      !deepGet(model, 'extractionNamespace.uri'),
+    required: (model: LookupSpec) =>
+      !deepGet(model, 'extractionNamespace.uriPrefix') &&
+      !deepGet(model, 'extractionNamespace.uri'),
+    info:
+      'A URI which specifies a directory (or other searchable resource) in which to search for files',
+  },
+  {
+    name: 'extractionNamespace.uri',
+    type: 'string',
+    label: 'URI (deprecated)',
+    placeholder: 's3://bucket/some/key/prefix/lookups-01.gz',
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      !deepGet(model, 'extractionNamespace.uriPrefix'),
+    required: (model: LookupSpec) =>
+      !deepGet(model, 'extractionNamespace.uriPrefix') &&
+      !deepGet(model, 'extractionNamespace.uri'),
+    info: (
+      <>
+        <p>URI for the file of interest, specified as a file, hdfs, or s3 path</p>
+        <p>The URI prefix option is strictly better than URI and should be used instead</p>
+      </>
+    ),
+  },
+  {
+    name: 'extractionNamespace.fileRegex',
+    type: 'string',
+    label: 'File regex',
+    defaultValue: '.*',
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      Boolean(deepGet(model, 'extractionNamespace.uriPrefix')),
+    info: 'Optional regex for matching the file name under uriPrefix.',
+  },
+
+  // namespaceParseSpec
+  {
+    name: 'extractionNamespace.namespaceParseSpec.format',
+    type: 'string',
+    label: 'Parse format',
+    suggestions: ['csv', 'tsv', 'simpleJson', 'customJson'],
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'uri',
+    required: true,
+    info: (
+      <>
+        <p>The format of the data in the lookup files.</p>
+        <p>
+          The <Code>simpleJson</Code> lookupParseSpec does not take any parameters. It is simply a
+          line delimited JSON file where the field is the key, and the field's value is the value.
+        </p>
+      </>
+    ),
+  },
+
+  // CSV + TSV
+  {
+    name: 'extractionNamespace.namespaceParseSpec.skipHeaderRows',
+    type: 'number',
+    label: 'Skip header rows',
+    defaultValue: 0,
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
+    info: `Number of header rows to be skipped. The default number of header rows to be skipped is 0.`,
+  },
+  {
+    name: 'extractionNamespace.namespaceParseSpec.hasHeaderRow',
+    type: 'boolean',
+    label: 'Has header row',
+    defaultValue: false,
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
+    info: `A flag to indicate that column information can be extracted from the input files' header row`,
+  },
+  {
+    name: 'extractionNamespace.namespaceParseSpec.columns',
+    type: 'string-array',
+    label: 'Columns',
+    placeholder: `["key", "value"]`,
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
+    required: (model: LookupSpec) =>
+      !deepGet(model, 'extractionNamespace.namespaceParseSpec.hasHeaderRow'),
+    info: 'The list of columns in the csv file',
+  },
+  {
+    name: 'extractionNamespace.namespaceParseSpec.keyColumn',
+    type: 'string',
+    label: 'Key column',
+    placeholder: '(optional - defaults to the first column)',
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
+    info: 'The name of the column containing the key',
+  },
+  {
+    name: 'extractionNamespace.namespaceParseSpec.valueColumn',
+    type: 'string',
+    label: 'Value column',
+    placeholder: '(optional - defaults to the second column)',
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
+    info: 'The name of the column containing the value',
+  },
+
+  // TSV only
+  {
+    name: 'extractionNamespace.namespaceParseSpec.delimiter',
+    type: 'string',
+    label: 'Delimiter',
+    placeholder: `(optional)`,
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      deepGet(model, 'extractionNamespace.namespaceParseSpec.format') === 'tsv',
+  },
+  {
+    name: 'extractionNamespace.namespaceParseSpec.listDelimiter',
+    type: 'string',
+    label: 'List delimiter',
+    placeholder: `(optional)`,
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      deepGet(model, 'extractionNamespace.namespaceParseSpec.format') === 'tsv',
+  },
+
+  // Custom JSON
+  {
+    name: 'extractionNamespace.namespaceParseSpec.keyFieldName',
+    type: 'string',
+    label: 'Key field name',
+    placeholder: `key`,
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      deepGet(model, 'extractionNamespace.namespaceParseSpec.format') === 'customJson',
+    required: true,
+  },
+  {
+    name: 'extractionNamespace.namespaceParseSpec.valueFieldName',
+    type: 'string',
+    label: 'Value field name',
+    placeholder: `value`,
+    defined: (model: LookupSpec) =>
+      deepGet(model, 'extractionNamespace.type') === 'uri' &&
+      deepGet(model, 'extractionNamespace.namespaceParseSpec.format') === 'customJson',
+    required: true,
+  },
+  {
+    name: 'extractionNamespace.pollPeriod',
+    type: 'string',
+    label: 'Poll period',
+    defaultValue: '0',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'uri',
+    info: `Period between polling for updates`,
+  },
+
+  // JDBC stuff
+  {
+    name: 'extractionNamespace.namespace',
+    type: 'string',
+    label: 'Namespace',
+    placeholder: 'some_lookup',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    required: true,
+    info: (
+      <>
+        <p>The namespace value in the SQL query:</p>
+        <p>
+          SELECT keyColumn, valueColumn, tsColumn? FROM <strong>namespace</strong>.table WHERE
+          filter
+        </p>
+      </>
+    ),
+  },
+  {
+    name: 'extractionNamespace.connectorConfig.connectURI',
+    type: 'string',
+    label: 'Connect URI',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    required: true,
+    info: 'Defines the connectURI value of the connector config to be used',
+  },
+  {
+    name: 'extractionNamespace.connectorConfig.user',
+    type: 'string',
+    label: 'User',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    info: 'Defines the user to be used by the connector config',
+  },
+  {
+    name: 'extractionNamespace.connectorConfig.password',
+    type: 'string',
+    label: 'Password',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    info: 'Defines the password to be used by the connector config',
+  },
+  {
+    name: 'extractionNamespace.connectorConfig.createTables',
+    type: 'boolean',
+    label: 'Create tables',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    info: 'Should tables be created',
+  },
+  {
+    name: 'extractionNamespace.table',
+    type: 'string',
+    label: 'Table',
+    placeholder: 'some_lookup_table',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    required: true,
+    info: (
+      <>
+        <p>
+          The table which contains the key value pairs. This will become the table value in the SQL
+          query:
+        </p>
+        <p>
+          SELECT keyColumn, valueColumn, tsColumn? FROM namespace.<strong>table</strong> WHERE
+          filter
+        </p>
+      </>
+    ),
+  },
+  {
+    name: 'extractionNamespace.keyColumn',
+    type: 'string',
+    label: 'Key column',
+    placeholder: 'my_key_value',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    required: true,
+    info: (
+      <>
+        <p>
+          The column in the table which contains the keys. This will become the keyColumn value in
+          the SQL query:
+        </p>
+        <p>
+          SELECT <strong>keyColumn</strong>, valueColumn, tsColumn? FROM namespace.table WHERE
+          filter
+        </p>
+      </>
+    ),
+  },
+  {
+    name: 'extractionNamespace.valueColumn',
+    type: 'string',
+    label: 'Value column',
+    placeholder: 'my_column_value',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    required: true,
+    info: (
+      <>
+        <p>
+          The column in the table which contains the values. This will become the valueColumn value in
+          the SQL query:
+        </p>
+        <p>
+          SELECT keyColumn, <strong>valueColumn</strong>, tsColumn? FROM namespace.table WHERE
+          filter
+        </p>
+      </>
+    ),
+  },
+  {
+    name: 'extractionNamespace.filter',
+    type: 'string',
+    label: 'Filter',
+    placeholder: '(optional)',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    info: (
+      <>
+        <p>
+          The filter to be used when selecting lookups, this is used to create a where clause on
+          lookup population. This will become the expression filter in the SQL query:
+        </p>
+        <p>
+          SELECT keyColumn, valueColumn, tsColumn? FROM namespace.table WHERE{' '}
+          <strong>filter</strong>
+        </p>
+      </>
+    ),
+  },
+  {
+    name: 'extractionNamespace.tsColumn',
+    type: 'string',
+    label: 'Timestamp column',
+    placeholder: '(optional)',
+    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
+    info: (
+      <>
+        <p>
+          The column in the table which contains when the key was updated. This will become the tsColumn value in
+          the SQL query:
+        </p>
+        <p>
+          SELECT keyColumn, valueColumn, <strong>tsColumn</strong>? FROM namespace.table WHERE
+          filter
+        </p>
+      </>
+    ),
+  },
+
+  // Extra cachedNamespace things
+  {
+    name: 'firstCacheTimeout',
+    type: 'number',
+    label: 'First cache timeout',
+    defaultValue: 0,
+    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
+    info: `How long to wait (in ms) for the first run of the cache to populate. 0 indicates to not wait`,
+  },
+  {
+    name: 'injective',
+    type: 'boolean',
+    defaultValue: false,
+    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
+    info: `If the underlying map is injective (keys and values are unique) then optimizations can occur internally by setting this to true`,
+  },
+];
+
+export function isLookupInvalid(
+  lookupName: string | undefined,
+  lookupVersion: string | undefined,
+  lookupTier: string | undefined,
+  lookupSpec: LookupSpec | undefined,
+) {
+  return (
+    !lookupName ||
+    !lookupVersion ||
+    !lookupTier ||
+    Boolean(AutoForm.issueWithModel(lookupSpec, LOOKUP_FIELDS))
+  );
+}
diff --git a/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss b/web-console/src/druid-models/metric-spec.spec.ts
similarity index 71%
copy from web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
copy to web-console/src/druid-models/metric-spec.spec.ts
index f5cd57f..25b3f15 100644
--- a/web-console/src/dialogs/compaction-dialog/compaction-dialog.scss
+++ b/web-console/src/druid-models/metric-spec.spec.ts
@@ -16,23 +16,17 @@
  * limitations under the License.
  */
 
-.compaction-dialog {
-  &.bp3-dialog {
-    height: 80vh;
-  }
+import { getMetricSpecs } from './metric-spec';
 
-  .form-json-selector {
-    margin: 15px;
-  }
-
-  .content {
-    margin: 0 15px 10px 0;
-    padding: 0 5px 0 15px;
-    flex: 1;
-    overflow: auto;
-  }
-
-  .ace-solarized-dark {
-    background-color: #232c35;
-  }
-}
+describe('metric-spec', () => {
+  it('getMetricSpecs', () => {
+    expect(getMetricSpecs({ header: ['header'], rows: [] }, {})).toMatchInlineSnapshot(`
+      Array [
+        Object {
+          "name": "count",
+          "type": "count",
+        },
+      ]
+    `);
+  });
+});
diff --git a/web-console/src/druid-models/metric-spec.tsx b/web-console/src/druid-models/metric-spec.tsx
new file mode 100644
index 0000000..fd1282f
--- /dev/null
+++ b/web-console/src/druid-models/metric-spec.tsx
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { Code } from '@blueprintjs/core';
+import React from 'react';
+
+import { ExternalLink, Field } from '../components';
+import { getLink } from '../links';
+import { filterMap, oneOf } from '../utils';
+import { HeaderAndRows } from '../utils/sampler';
+
+import { getColumnTypeFromHeaderAndRows } from './ingestion-spec';
+
+export interface MetricSpec {
+  type: string;
+  name?: string;
+  fieldName?: string;
+  maxStringBytes?: number;
+  filterNullValues?: boolean;
+  fieldNames?: string[];
+  fnAggregate?: string;
+  fnCombine?: string;
+  fnReset?: string;
+  fields?: string[];
+  byRow?: boolean;
+  round?: boolean;
+  isInputHyperUnique?: boolean;
+  filter?: any;
+  aggregator?: MetricSpec;
+}
+
+export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
+  {
+    name: 'name',
+    type: 'string',
+    info: <>The metric name as it will appear in Druid.</>,
+  },
+  {
+    name: 'type',
+    type: 'string',
+    suggestions: [
+      'count',
+      {
+        group: 'sum',
+        suggestions: ['longSum', 'doubleSum', 'floatSum'],
+      },
+      {
+        group: 'min',
+        suggestions: ['longMin', 'doubleMin', 'floatMin'],
+      },
+      {
+        group: 'max',
+        suggestions: ['longMax', 'doubleMax', 'floatMax'],
+      },
+      {
+        group: 'first',
+        suggestions: ['longFirst', 'doubleFirst', 'floatFirst'],
+      },
+      {
+        group: 'last',
+        suggestions: ['longLast', 'doubleLast', 'floatLast'],
+      },
+      'thetaSketch',
+      {
+        group: 'HLLSketch',
+        suggestions: ['HLLSketchBuild', 'HLLSketchMerge'],
+      },
+      'quantilesDoublesSketch',
+      'momentSketch',
+      'fixedBucketsHistogram',
+      'hyperUnique',
+      'filtered',
+    ],
+    info: <>The aggregation function to apply.</>,
+  },
+  {
+    name: 'fieldName',
+    type: 'string',
+    defined: m => m.type !== 'filtered',
+    info: <>The column name for the aggregator to operate on.</>,
+  },
+  {
+    name: 'maxStringBytes',
+    type: 'number',
+    defaultValue: 1024,
+    defined: m => {
+      return oneOf(m.type, 'stringFirst', 'stringLast');
+    },
+  },
+  {
+    name: 'filterNullValues',
+    type: 'boolean',
+    defaultValue: false,
+    defined: m => {
+      return oneOf(m.type, 'stringFirst', 'stringLast');
+    },
+  },
+  // filtered
+  {
+    name: 'filter',
+    type: 'json',
+    defined: m => m.type === 'filtered',
+  },
+  {
+    name: 'aggregator',
+    type: 'json',
+    defined: m => m.type === 'filtered',
+  },
+  // thetaSketch
+  {
+    name: 'size',
+    type: 'number',
+    defined: m => m.type === 'thetaSketch',
+    defaultValue: 16384,
+    info: (
+      <>
+        <p>
+          Must be a power of 2. Internally, size refers to the maximum number of entries sketch
+          object will retain. Higher size means higher accuracy but more space to store sketches.
+          Note that after you index with a particular size, druid will persist sketch in segments
+          and you will use size greater or equal to that at query time.
+        </p>
+        <p>
+          See the{' '}
+          <ExternalLink href="https://datasketches.apache.org/docs/Theta/ThetaSize.html">
+            DataSketches site
+          </ExternalLink>{' '}
+          for details.
+        </p>
+        <p>In general, we recommend just sticking to the default size.</p>
+      </>
+    ),
+  },
+  {
+    name: 'isInputThetaSketch',
+    type: 'boolean',
+    defined: m => m.type === 'thetaSketch',
+    defaultValue: false,
+    info: (
+      <>
+        This should only be used at indexing time if your input data contains theta sketch objects.
+        This would be the case if you use datasketches library outside of Druid, say with Pig/Hive,
+        to produce the data that you are ingesting into Druid.
+      </>
+    ),
+  },
+  // HLLSketchBuild & HLLSketchMerge
+  {
+    name: 'lgK',
+    type: 'number',
+    defined: m => oneOf(m.type, 'HLLSketchBuild', 'HLLSketchMerge'),
+    defaultValue: 12,
+    info: (
+      <>
+        <p>
+          log2 of K that is the number of buckets in the sketch, parameter that controls the size
+          and the accuracy.
+        </p>
+        <p>Must be between 4 to 21 inclusively.</p>
+      </>
+    ),
+  },
+  {
+    name: 'tgtHllType',
+    type: 'string',
+    defined: m => oneOf(m.type, 'HLLSketchBuild', 'HLLSketchMerge'),
+    defaultValue: 'HLL_4',
+    suggestions: ['HLL_4', 'HLL_6', 'HLL_8'],
+    info: (
+      <>
+        The type of the target HLL sketch. Must be <Code>HLL_4</Code>, <Code>HLL_6</Code>, or{' '}
+        <Code>HLL_8</Code>.
+      </>
+    ),
+  },
+  // quantilesDoublesSketch
+  {
+    name: 'k',
+    type: 'number',
+    defined: m => m.type === 'quantilesDoublesSketch',
+    defaultValue: 128,
+    info: (
+      <>
+        <p>
+          Parameter that determines the accuracy and size of the sketch. Higher k means higher
+          accuracy but more space to store sketches.
+        </p>
+        <p>
+          Must be a power of 2 from 2 to 32768. See the{' '}
+          <ExternalLink href="https://datasketches.apache.org/docs/Quantiles/QuantilesAccuracy.html">
+            Quantiles Accuracy
+          </ExternalLink>{' '}
+          for details.
+        </p>
+      </>
+    ),
+  },
+  // momentSketch
+  {
+    name: 'k',
+    type: 'number',
+    defined: m => m.type === 'momentSketch',
+    required: true,
+    info: (
+      <>
+        Parameter that determines the accuracy and size of the sketch. Higher k means higher
+        accuracy but more space to store sketches. Usable range is generally [3,15]
+      </>
+    ),
+  },
+  {
+    name: 'compress',
+    type: 'boolean',
+    defined: m => m.type === 'momentSketch',
+    defaultValue: true,
+    info: (
+      <>
+        Flag for whether the aggregator compresses numeric values using arcsinh. Can improve
+        robustness to skewed and long-tailed distributions, but reduces accuracy slightly on more
+        uniform distributions.
+      </>
+    ),
+  },
+  // fixedBucketsHistogram
+  {
+    name: 'lowerLimit',
+    type: 'number',
+    defined: m => m.type === 'fixedBucketsHistogram',
+    required: true,
+    info: <>Lower limit of the histogram.</>,
+  },
+  {
+    name: 'upperLimit',
+    type: 'number',
+    defined: m => m.type === 'fixedBucketsHistogram',
+    required: true,
+    info: <>Upper limit of the histogram.</>,
+  },
+  {
+    name: 'numBuckets',
+    type: 'number',
+    defined: m => m.type === 'fixedBucketsHistogram',
+    defaultValue: 10,
+    required: true,
+    info: (
+      <>
+        Number of buckets for the histogram. The range <Code>[lowerLimit, upperLimit]</Code> will be
+        divided into <Code>numBuckets</Code> intervals of equal size.
+      </>
+    ),
+  },
+  {
+    name: 'outlierHandlingMode',
+    type: 'string',
+    defined: m => m.type === 'fixedBucketsHistogram',
+    required: true,
+    suggestions: ['ignore', 'overflow', 'clip'],
+    info: (
+      <>
+        <p>
+          Specifies how values outside of <Code>[lowerLimit, upperLimit]</Code> will be handled.
+        </p>
+        <p>
+          Supported modes are <Code>ignore</Code>, <Code>overflow</Code>, and <Code>clip</Code>. See{' '}
+          <ExternalLink
+            href={`${getLink(
+              'DOCS',
+            )}/development/extensions-core/approximate-histograms.html#outlier-handling-modes`}
+          >
+            outlier handling modes
+          </ExternalLink>{' '}
+          for more details.
+        </p>
+      </>
+    ),
+  },
+  // hyperUnique
+  {
+    name: 'isInputHyperUnique',
+    type: 'boolean',
+    defined: m => m.type === 'hyperUnique',
+    defaultValue: false,
+    info: (
+      <>
+        This can be set to true to index precomputed HLL (Base64 encoded output from druid-hll is
+        expected).
+      </>
+    ),
+  },
+];
+
+export function getMetricSpecName(metricSpec: MetricSpec): string {
+  return (
+    metricSpec.name || (metricSpec.aggregator ? getMetricSpecName(metricSpec.aggregator) : '?')
+  );
+}
+
+export function getMetricSpecSingleFieldName(metricSpec: MetricSpec): string | undefined {
+  return (
+    metricSpec.fieldName ||
+    (metricSpec.aggregator ? getMetricSpecSingleFieldName(metricSpec.aggregator) : undefined)
+  );
+}
+
+export function getMetricSpecOutputType(metricSpec: MetricSpec): string | undefined {
+  if (metricSpec.aggregator) return getMetricSpecOutputType(metricSpec.aggregator);
+  const m = String(metricSpec.type).match(/^(long|float|double)/);
+  if (!m) return;
+  return m[1];
+}
+
+export function getMetricSpecs(
+  headerAndRows: HeaderAndRows,
+  typeHints: Record<string, string>,
+): MetricSpec[] {
+  return [{ name: 'count', type: 'count' }].concat(
+    filterMap(headerAndRows.header, h => {
+      if (h === '__time') return;
+      const type = typeHints[h] || getColumnTypeFromHeaderAndRows(headerAndRows, h);
+      switch (type) {
+        case 'double':
+          return { name: `sum_${h}`, type: 'doubleSum', fieldName: h };
+        case 'float':
+          return { name: `sum_${h}`, type: 'floatSum', fieldName: h };
+        case 'long':
+          return { name: `sum_${h}`, type: 'longSum', fieldName: h };
+        default:
+          return;
+      }
+    }),
+  );
+}
diff --git a/web-console/src/utils/druid-time.spec.ts b/web-console/src/druid-models/time.spec.ts
similarity index 95%
rename from web-console/src/utils/druid-time.spec.ts
rename to web-console/src/druid-models/time.spec.ts
index 6ebb4bc..5670640 100644
--- a/web-console/src/utils/druid-time.spec.ts
+++ b/web-console/src/druid-models/time.spec.ts
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-import { timeFormatMatches } from './druid-time';
+import { timeFormatMatches } from './time';
 
 describe('timeFormatMatches', () => {
   it('works for auto', () => {
diff --git a/web-console/src/utils/druid-time.ts b/web-console/src/druid-models/time.ts
similarity index 98%
rename from web-console/src/utils/druid-time.ts
rename to web-console/src/druid-models/time.ts
index 3dc6fe9..c20d2cb 100644
--- a/web-console/src/utils/druid-time.ts
+++ b/web-console/src/druid-models/time.ts
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-import { jodaFormatToRegExp } from './joda-to-regexp';
+import { jodaFormatToRegExp } from '../utils/joda-to-regexp';
 
 export const NUMERIC_TIME_FORMATS: string[] = ['posix', 'millis', 'micro', 'nano'];
 export const BASIC_TIME_FORMATS: string[] = ['auto', 'iso'].concat(NUMERIC_TIME_FORMATS);
diff --git a/web-console/src/druid-models/timestamp-spec.tsx b/web-console/src/druid-models/timestamp-spec.tsx
new file mode 100644
index 0000000..8e17d20
--- /dev/null
+++ b/web-console/src/druid-models/timestamp-spec.tsx
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import React from 'react';
+
+import { ExternalLink, Field } from '../components';
+import { deepGet, EMPTY_ARRAY, EMPTY_OBJECT } from '../utils';
+
+import { IngestionSpec } from './ingestion-spec';
+import {
+  BASIC_TIME_FORMATS,
+  DATE_ONLY_TIME_FORMATS,
+  DATETIME_TIME_FORMATS,
+  OTHER_TIME_FORMATS,
+} from './time';
+import { Transform } from './transform-spec';
+
+const NO_SUCH_COLUMN = '!!!_no_such_column_!!!';
+
+export const PLACEHOLDER_TIMESTAMP_SPEC: TimestampSpec = {
+  column: NO_SUCH_COLUMN,
+  missingValue: '1970-01-01T00:00:00Z',
+};
+
+export const CONSTANT_TIMESTAMP_SPEC: TimestampSpec = {
+  column: NO_SUCH_COLUMN,
+  missingValue: '2010-01-01T00:00:00Z',
+};
+
+export type TimestampSchema = 'none' | 'column' | 'expression';
+
+export function getTimestampSchema(spec: IngestionSpec): TimestampSchema {
+  const transforms: Transform[] =
+    deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;
+
+  const timeTransform = transforms.find(transform => transform.name === '__time');
+  if (timeTransform) return 'expression';
+
+  const timestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec') || EMPTY_OBJECT;
+  return timestampSpec.column === NO_SUCH_COLUMN ? 'none' : 'column';
+}
+
+export interface TimestampSpec {
+  column?: string;
+  format?: string;
+  missingValue?: string;
+}
+
+export function getTimestampSpecColumnFromSpec(spec: IngestionSpec): string {
+  // For the default https://github.com/apache/druid/blob/master/core/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java#L44
+  return deepGet(spec, 'spec.dataSchema.timestampSpec.column') || 'timestamp';
+}
+
+export function getTimestampSpecConstantFromSpec(spec: IngestionSpec): string | undefined {
+  return deepGet(spec, 'spec.dataSchema.timestampSpec.missingValue');
+}
+
+export function getTimestampSpecExpressionFromSpec(spec: IngestionSpec): string | undefined {
+  const transforms: Transform[] =
+    deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;
+
+  const timeTransform = transforms.find(transform => transform.name === '__time');
+  if (!timeTransform) return;
+  return timeTransform.expression;
+}
+
+export function getTimestampDetailFromSpec(spec: IngestionSpec): string {
+  const timestampSchema = getTimestampSchema(spec);
+  switch (timestampSchema) {
+    case 'none':
+      return `Constant: ${getTimestampSpecConstantFromSpec(spec)}`;
+
+    case 'column':
+      return `Column: ${getTimestampSpecColumnFromSpec(spec)}`;
+
+    case 'expression':
+      return `Expression: ${getTimestampSpecExpressionFromSpec(spec)}`;
+  }
+
+  return '-';
+}
+
+export const TIMESTAMP_SPEC_FIELDS: Field<TimestampSpec>[] = [
+  {
+    name: 'column',
+    type: 'string',
+    defaultValue: 'timestamp',
+    required: true,
+  },
+  {
+    name: 'format',
+    type: 'string',
+    defaultValue: 'auto',
+    suggestions: [
+      ...BASIC_TIME_FORMATS,
+      {
+        group: 'Date and time formats',
+        suggestions: DATETIME_TIME_FORMATS,
+      },
+      {
+        group: 'Date only formats',
+        suggestions: DATE_ONLY_TIME_FORMATS,
+      },
+      {
+        group: 'Other time formats',
+        suggestions: OTHER_TIME_FORMATS,
+      },
+    ],
+    info: (
+      <p>
+        Please specify your timestamp format by using the suggestions menu or typing in a{' '}
+        <ExternalLink href="https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html">
+          format string
+        </ExternalLink>
+        .
+      </p>
+    ),
+  },
+  {
+    name: 'missingValue',
+    type: 'string',
+    placeholder: '(optional)',
+    info: <p>This value will be used if the specified column cannot be found.</p>,
+  },
+];
+
+export const CONSTANT_TIMESTAMP_SPEC_FIELDS: Field<TimestampSpec>[] = [
+  {
+    name: 'missingValue',
+    label: 'Placeholder value',
+    type: 'string',
+    info: <p>The placeholder value that will be used as the timestamp.</p>,
+  },
+];
+
+export function issueWithTimestampSpec(
+  timestampSpec: TimestampSpec | undefined,
+): string | undefined {
+  if (!timestampSpec) return 'no spec';
+  if (!timestampSpec.column && !timestampSpec.missingValue) return 'timestamp spec is blank';
+  return;
+}
diff --git a/web-console/src/druid-models/transform-spec.tsx b/web-console/src/druid-models/transform-spec.tsx
new file mode 100644
index 0000000..81e9b33
--- /dev/null
+++ b/web-console/src/druid-models/transform-spec.tsx
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { Code } from '@blueprintjs/core';
+import React from 'react';
+
+import { ExternalLink, Field } from '../components';
+import { getLink } from '../links';
+
+export interface TransformSpec {
+  transforms?: Transform[];
+  filter?: Record<string, any>;
+}
+
+export interface Transform {
+  type: string;
+  name: string;
+  expression: string;
+}
+
+export const TRANSFORM_FIELDS: Field<Transform>[] = [
+  {
+    name: 'name',
+    type: 'string',
+    placeholder: 'output_name',
+    required: true,
+  },
+  {
+    name: 'type',
+    type: 'string',
+    suggestions: ['expression'],
+    required: true,
+  },
+  {
+    name: 'expression',
+    type: 'string',
+    placeholder: '"foo" + "bar"',
+    required: true,
+    info: (
+      <>
+        A valid Druid{' '}
+        <ExternalLink href={`${getLink('DOCS')}/misc/math-expr.html`}>expression</ExternalLink>.
+      </>
+    ),
+  },
+];
+
+export function getTimestampExpressionFields(transforms: Transform[]): Field<Transform[]>[] {
+  const timeTransformIndex = transforms.findIndex(transform => transform.name === '__time');
+  if (timeTransformIndex < 0) return [];
+
+  return [
+    {
+      name: `${timeTransformIndex}.expression`,
+      label: 'Expression',
+      type: 'string',
+      placeholder: `timestamp_parse(concat("date", ' ', "time"))`,
+      required: true,
+      suggestions: [
+        `timestamp_parse(concat("date", ' ', "time"))`,
+        `timestamp_parse(concat("date", ' ', "time"), 'M/d/yyyy H:mm:ss')`,
+        `timestamp_parse(concat("year", '-', "month", '-', "day"))`,
+      ],
+      info: (
+        <>
+          A valid Druid{' '}
+          <ExternalLink href={`${getLink('DOCS')}/misc/math-expr.html`}>expression</ExternalLink>{' '}
+          that should output a millis timestamp. You most likely want to use the{' '}
+          <Code>timestamp_parse</Code> function at the outer level.
+        </>
+      ),
+    },
+  ];
+}
+
+export function addTimestampTransform(transforms: Transform[]): Transform[] {
+  return [
+    {
+      name: '__time',
+      type: 'expression',
+      expression: '',
+    },
+  ].concat(transforms);
+}
+
+export function removeTimestampTransform(transforms: Transform[]): Transform[] | undefined {
+  const newTransforms = transforms.filter(transform => transform.name !== '__time');
+  return newTransforms.length ? newTransforms : undefined;
+}
diff --git a/web-console/src/entry.scss b/web-console/src/entry.scss
index 38a0d19..426480e 100644
--- a/web-console/src/entry.scss
+++ b/web-console/src/entry.scss
@@ -16,13 +16,13 @@
  * limitations under the License.
  */
 
-@import '../node_modules/normalize.css/normalize';
+@import '~normalize.css/normalize';
 @import '~fontsource-open-sans/index.css';
 @import './blueprint-overrides';
 @import '~@blueprintjs/core/src/blueprint';
 @import '~@blueprintjs/datetime/src/blueprint-datetime';
+@import '~react-splitter-layout/lib/index';
 @import '../lib/react-table';
-@import '../node_modules/react-splitter-layout/lib/index.css';
 
 html,
 body {
@@ -45,6 +45,10 @@ body {
       outline: none !important;
     }
   }
+
+  .ace-solarized-dark {
+    background-color: rgba($dark-gray1, 0.5);
+  }
 }
 
 .app-container {
diff --git a/web-console/src/links.ts b/web-console/src/links.ts
index b4b3b23..488b238 100644
--- a/web-console/src/links.ts
+++ b/web-console/src/links.ts
@@ -19,7 +19,7 @@
 import hasOwnProp from 'has-own-prop';
 
 // This is set to the latest available version and should be updated to the next version before release
-const DRUID_DOCS_VERSION = '0.19.0';
+const DRUID_DOCS_VERSION = '0.20.0';
 
 function fillVersion(str: string): string {
   return str.replace(/\{\{VERSION}}/g, DRUID_DOCS_VERSION);
diff --git a/web-console/src/utils/druid-query.spec.ts b/web-console/src/utils/druid-query.spec.ts
index 140f146..55fd336 100644
--- a/web-console/src/utils/druid-query.spec.ts
+++ b/web-console/src/utils/druid-query.spec.ts
@@ -18,7 +18,7 @@
 
 import { sane } from 'druid-query-toolkit/build/test-utils';
 
-import { DruidError } from './druid-query';
+import { DruidError, getDruidErrorMessage, parseHtmlError, parseQueryPlan } from './druid-query';
 
 describe('DruidQuery', () => {
   describe('DruidError.parsePosition', () => {
@@ -128,4 +128,18 @@ describe('DruidQuery', () => {
       expect(suggestion).toBeUndefined();
     });
   });
+
+  describe('misc', () => {
+    it('parseHtmlError', () => {
+      expect(parseHtmlError('<div></div>')).toMatchInlineSnapshot(`undefined`);
+    });
+
+    it('getDruidErrorMessage', () => {
+      expect(getDruidErrorMessage({})).toMatchInlineSnapshot(`undefined`);
+    });
+
+    it('parseQueryPlan', () => {
+      expect(parseQueryPlan('start')).toMatchInlineSnapshot(`"start"`);
+    });
+  });
 });
diff --git a/web-console/src/utils/druid-type.ts b/web-console/src/utils/druid-type.ts
deleted file mode 100644
index 821dddc..0000000
--- a/web-console/src/utils/druid-type.ts
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import { filterMap } from './general';
-import { DimensionMode, DimensionSpec, IngestionSpec, MetricSpec } from './ingestion-spec';
-import { deepDelete, deepSet } from './object-change';
-import { HeaderAndRows } from './sampler';
-
-export function guessTypeFromSample(sample: any[]): string {
-  const definedValues = sample.filter(v => v != null);
-  if (
-    definedValues.length &&
-    definedValues.every(v => !isNaN(v) && (typeof v === 'number' || typeof v === 'string'))
-  ) {
-    if (definedValues.every(v => v % 1 === 0)) {
-      return 'long';
-    } else {
-      return 'double';
-    }
-  } else {
-    return 'string';
-  }
-}
-
-export function getColumnTypeFromHeaderAndRows(
-  headerAndRows: HeaderAndRows,
-  column: string,
-): string {
-  return guessTypeFromSample(
-    filterMap(headerAndRows.rows, (r: any) => (r.parsed ? r.parsed[column] : undefined)),
-  );
-}
-
-export function getDimensionSpecs(
-  headerAndRows: HeaderAndRows,
-  hasRollup: boolean,
-): (string | DimensionSpec)[] {
-  return filterMap(headerAndRows.header, h => {
-    if (h === '__time') return;
-    const guessedType = getColumnTypeFromHeaderAndRows(headerAndRows, h);
-    if (guessedType === 'string') return h;
-    if (hasRollup) return;
-    return {
-      type: guessedType,
-      name: h,
-    };
-  });
-}
-
-export function getMetricSpecs(headerAndRows: HeaderAndRows): MetricSpec[] {
-  return [{ name: 'count', type: 'count' }].concat(
-    filterMap(headerAndRows.header, h => {
-      if (h === '__time') return;
-      const guessedType = getColumnTypeFromHeaderAndRows(headerAndRows, h);
-      switch (guessedType) {
-        case 'double':
-          return { name: `sum_${h}`, type: 'doubleSum', fieldName: h };
-        case 'long':
-          return { name: `sum_${h}`, type: 'longSum', fieldName: h };
-        default:
-          return;
-      }
-    }),
-  );
-}
-
-export function updateSchemaWithSample(
-  spec: IngestionSpec,
-  headerAndRows: HeaderAndRows,
-  dimensionMode: DimensionMode,
-  rollup: boolean,
-): IngestionSpec {
-  let newSpec = spec;
-
-  if (dimensionMode === 'auto-detect') {
-    newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec.dimensions', []);
-  } else {
-    newSpec = deepDelete(newSpec, 'spec.dataSchema.dimensionsSpec.dimensionExclusions');
-
-    const dimensions = getDimensionSpecs(headerAndRows, rollup);
-    if (dimensions) {
-      newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec.dimensions', dimensions);
-    }
-  }
-
-  if (rollup) {
-    newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.queryGranularity', 'HOUR');
-
-    const metrics = getMetricSpecs(headerAndRows);
-    if (metrics) {
-      newSpec = deepSet(newSpec, 'spec.dataSchema.metricsSpec', metrics);
-    }
-  } else {
-    newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.queryGranularity', 'NONE');
-    newSpec = deepDelete(newSpec, 'spec.dataSchema.metricsSpec');
-  }
-
-  newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.rollup', rollup);
-  return newSpec;
-}
diff --git a/web-console/src/utils/general.spec.ts b/web-console/src/utils/general.spec.ts
index a950103..e614b87 100644
--- a/web-console/src/utils/general.spec.ts
+++ b/web-console/src/utils/general.spec.ts
@@ -55,20 +55,26 @@ describe('general', () => {
   });
 
   describe('sqlQueryCustomTableFilter', () => {
-    it('works', () => {
+    it('works with contains', () => {
       expect(
-        sqlQueryCustomTableFilter({
-          id: 'datasource',
-          value: `hello`,
-        }),
-      ).toMatchInlineSnapshot(`"LOWER(\\"datasource\\") LIKE LOWER('%hello%')"`);
+        String(
+          sqlQueryCustomTableFilter({
+            id: 'datasource',
+            value: `Hello`,
+          }),
+        ),
+      ).toEqual(`LOWER("datasource") LIKE '%hello%'`);
+    });
 
+    it('works with exact', () => {
       expect(
-        sqlQueryCustomTableFilter({
-          id: 'datasource',
-          value: `"hello"`,
-        }),
-      ).toMatchInlineSnapshot(`"\\"datasource\\" = 'hello'"`);
+        String(
+          sqlQueryCustomTableFilter({
+            id: 'datasource',
+            value: `"hello"`,
+          }),
+        ),
+      ).toEqual(`"datasource" = 'hello'`);
     });
   });
 
diff --git a/web-console/src/utils/general.tsx b/web-console/src/utils/general.tsx
index 2fc5762..ca2a111 100644
--- a/web-console/src/utils/general.tsx
+++ b/web-console/src/utils/general.tsx
@@ -19,6 +19,7 @@
 import { Button, HTMLSelect, InputGroup, Intent } from '@blueprintjs/core';
 import { IconNames } from '@blueprintjs/icons';
 import copy from 'copy-to-clipboard';
+import { SqlExpression, SqlFunction, SqlLiteral, SqlRef } from 'druid-query-toolkit';
 import FileSaver from 'file-saver';
 import hasOwnProp from 'has-own-prop';
 import numeral from 'numeral';
@@ -27,6 +28,10 @@ import { Filter, FilterRender } from 'react-table';
 
 import { AppToaster } from '../singletons/toaster';
 
+// These constants are used to make sure that they are not constantly recreated, thrashing the pure components.
+export const EMPTY_OBJECT: any = {};
+export const EMPTY_ARRAY: any[] = [];
+
 export function wait(ms: number): Promise<void> {
   return new Promise(resolve => {
     setTimeout(resolve, ms);
@@ -117,14 +122,15 @@ export function booleanCustomTableFilter(filter: Filter, value: any): boolean {
   return haystack.includes(needle);
 }
 
-export function sqlQueryCustomTableFilter(filter: Filter): string {
-  const columnName = JSON.stringify(filter.id);
+export function sqlQueryCustomTableFilter(filter: Filter): SqlExpression {
   const needleAndMode: NeedleAndMode = getNeedleAndMode(filter.value);
   const needle = needleAndMode.needle;
   if (needleAndMode.mode === 'exact') {
-    return `${columnName} = '${needle}'`;
+    return SqlRef.columnWithQuotes(filter.id).equal(SqlLiteral.create(needle));
   } else {
-    return `LOWER(${columnName}) LIKE LOWER('%${needle}%')`;
+    return SqlFunction.simple('LOWER', [SqlRef.columnWithQuotes(filter.id)]).like(
+      SqlLiteral.create(`%${needle.toLowerCase()}%`),
+    );
   }
 }
 
@@ -135,6 +141,10 @@ export function caseInsensitiveContains(testString: string, searchString: string
   return testString.toLowerCase().includes(searchString.toLowerCase());
 }
 
+export function oneOf<T>(thing: T, ...options: T[]): boolean {
+  return options.includes(thing);
+}
+
 // ----------------------------
 
 export function countBy<T>(
diff --git a/web-console/src/utils/index.tsx b/web-console/src/utils/index.tsx
index 2bcf661..d278317 100644
--- a/web-console/src/utils/index.tsx
+++ b/web-console/src/utils/index.tsx
@@ -24,4 +24,5 @@ export * from './query-manager';
 export * from './query-cursor';
 export * from './local-storage-keys';
 export * from './column-metadata';
-export * from './compaction';
+export * from './object-change';
+export * from './capabilities';
diff --git a/web-console/src/utils/object-change.ts b/web-console/src/utils/object-change.ts
index 83b7cce..7ff7d5e 100644
--- a/web-console/src/utils/object-change.ts
+++ b/web-console/src/utils/object-change.ts
@@ -83,6 +83,17 @@ export function deepSet<T extends Record<string, any>>(value: T, path: string, x
   return valueCopy;
 }
 
+export function deepSetMulti<T extends Record<string, any>>(
+  value: T,
+  changes: Record<string, any>,
+): T {
+  let newValue = value;
+  for (const k in changes) {
+    newValue = deepSet(newValue, k, changes[k]);
+  }
+  return newValue;
+}
+
 export function deepDelete<T extends Record<string, any>>(value: T, path: string): T {
   const valueCopy = shallowCopy(value);
   const parts = parsePath(path);
diff --git a/web-console/src/utils/query-manager.tsx b/web-console/src/utils/query-manager.tsx
index 411d054..906c7b6 100644
--- a/web-console/src/utils/query-manager.tsx
+++ b/web-console/src/utils/query-manager.tsx
@@ -165,5 +165,8 @@ export class QueryManager<Q, R> {
 
   public terminate(): void {
     this.terminated = true;
+    if (this.currentRunCancelFn) {
+      this.currentRunCancelFn();
+    }
   }
 }
diff --git a/web-console/src/utils/sampler.ts b/web-console/src/utils/sampler.ts
index c56f572..48db5f6 100644
--- a/web-console/src/utils/sampler.ts
+++ b/web-console/src/utils/sampler.ts
@@ -18,24 +18,31 @@
 
 import axios from 'axios';
 
-import { getDruidErrorMessage, queryDruidRune } from './druid-query';
-import { alphanumericCompare, filterMap, sortWithPrefixSuffix } from './general';
 import {
   DimensionsSpec,
-  getDummyTimestampSpec,
   getSpecType,
+  getTimestampSchema,
   IngestionSpec,
   IngestionType,
   InputFormat,
   IoConfig,
-  isColumnTimestampSpec,
   isDruidSource,
   MetricSpec,
+  PLACEHOLDER_TIMESTAMP_SPEC,
   TimestampSpec,
   Transform,
   TransformSpec,
   upgradeSpec,
-} from './ingestion-spec';
+} from '../druid-models';
+
+import { getDruidErrorMessage, queryDruidRune } from './druid-query';
+import {
+  alphanumericCompare,
+  EMPTY_ARRAY,
+  filterMap,
+  oneOf,
+  sortWithPrefixSuffix,
+} from './general';
 import { deepGet, deepSet } from './object-change';
 
 const SAMPLER_URL = `/druid/indexer/v1/sampler`;
@@ -231,7 +238,8 @@ function cleanupQueryGranularity(queryGranularity: any): any {
   if (typeof queryGranularityType !== 'string') return queryGranularity;
   queryGranularityType = queryGranularityType.toUpperCase();
 
-  const knownGranularity = [
+  const knownGranularity = oneOf(
+    queryGranularityType,
     'NONE',
     'SECOND',
     'MINUTE',
@@ -240,7 +248,7 @@ function cleanupQueryGranularity(queryGranularity: any): any {
     'WEEK',
     'MONTH',
     'YEAR',
-  ].includes(queryGranularityType);
+  );
 
   return knownGranularity ? queryGranularityType : queryGranularity;
 }
@@ -272,7 +280,7 @@ export async function sampleForConnect(
       ioConfig,
       dataSchema: {
         dataSource: 'sample',
-        timestampSpec: getDummyTimestampSpec(),
+        timestampSpec: PLACEHOLDER_TIMESTAMP_SPEC,
         dimensionsSpec: {},
       },
     } as any,
@@ -326,7 +334,7 @@ export async function sampleForParser(
       ioConfig,
       dataSchema: {
         dataSource: 'sample',
-        timestampSpec: getDummyTimestampSpec(),
+        timestampSpec: PLACEHOLDER_TIMESTAMP_SPEC,
         dimensionsSpec: {},
       },
     },
@@ -342,7 +350,7 @@ export async function sampleForTimestamp(
 ): Promise<SampleResponse> {
   const samplerType = getSpecType(spec);
   const timestampSpec: TimestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec');
-  const columnTimestampSpec = isColumnTimestampSpec(timestampSpec);
+  const timestampSchema = getTimestampSchema(spec);
 
   // First do a query with a static timestamp spec
   const sampleSpecColumns: SampleSpec = {
@@ -352,7 +360,7 @@ export async function sampleForTimestamp(
       dataSchema: {
         dataSource: 'sample',
         dimensionsSpec: {},
-        timestampSpec: columnTimestampSpec ? getDummyTimestampSpec() : timestampSpec,
+        timestampSpec: timestampSchema === 'column' ? PLACEHOLDER_TIMESTAMP_SPEC : timestampSpec,
       },
     },
     samplerConfig: BASE_SAMPLER_CONFIG,
@@ -364,7 +372,10 @@ export async function sampleForTimestamp(
   );
 
   // If we are not parsing a column then there is nothing left to do
-  if (!columnTimestampSpec) return sampleColumns;
+  if (timestampSchema === 'none') return sampleColumns;
+
+  const transforms: Transform[] =
+    deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;
 
   // If we are trying to parse a column then get a bit fancy:
   // Query the same sample again (same cache key)
@@ -376,6 +387,9 @@ export async function sampleForTimestamp(
         dataSource: 'sample',
         dimensionsSpec: {},
         timestampSpec,
+        transformSpec: {
+          transforms: transforms.filter(transform => transform.name === '__time'),
+        },
       },
     },
     samplerConfig: BASE_SAMPLER_CONFIG,
diff --git a/web-console/src/utils/utils.spec.ts b/web-console/src/utils/utils.spec.ts
index cc85278..b2ddb02 100644
--- a/web-console/src/utils/utils.spec.ts
+++ b/web-console/src/utils/utils.spec.ts
@@ -16,18 +16,11 @@
  * limitations under the License.
  */
 
-import { getDruidErrorMessage, parseHtmlError, parseQueryPlan } from './druid-query';
-import {
-  getColumnTypeFromHeaderAndRows,
-  getDimensionSpecs,
-  getMetricSpecs,
-  guessTypeFromSample,
-  updateSchemaWithSample,
-} from './druid-type';
-import { IngestionSpec } from './ingestion-spec';
+import { IngestionSpec } from '../druid-models';
+
 import { applyCache, headerFromSampleResponse } from './sampler';
 
-describe('test-utils', () => {
+describe('utils', () => {
   const ingestionSpec: IngestionSpec = {
     type: 'index_parallel',
     spec: {
@@ -123,161 +116,4 @@ describe('test-utils', () => {
       }
     `);
   });
-
-  // it('spec-utils sampleForParser', async () => {
-  //   expect(await sampleForParser(ingestionSpec, 'start', 'abc123')).toMatchInlineSnapshot(
-  //     `Promise {}`,
-  //   );
-  // });
-  //
-  // it('spec-utils SampleSpec', async () => {
-  //   expect(await sampleForConnect(ingestionSpec, 'start')).toMatchInlineSnapshot(`Promise {}`);
-  // });
-  //
-  // it('spec-utils sampleForTimestamp', async () => {
-  //   expect(await sampleForTimestamp(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
-  // });
-  //
-  // it('spec-utils sampleForTransform', async () => {
-  //   expect(await sampleForTransform(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
-  // });
-  //
-  // it('spec-utils sampleForFilter', async () => {
-  //   expect(await sampleForFilter(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
-  // });
-  //
-  // it('spec-utils sampleForSchema', async () => {
-  //   expect(await sampleForSchema(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
-  // });
-  //
-  // it('spec-utils sampleForExampleManifests', async () => {
-  //   expect(await sampleForExampleManifests('some url')).toMatchInlineSnapshot();
-  // });
-});
-
-describe('druid-type.ts', () => {
-  const ingestionSpec: IngestionSpec = {
-    type: 'index_parallel',
-    spec: {
-      ioConfig: {
-        type: 'index_parallel',
-        inputSource: {
-          type: 'http',
-          uris: ['https://static.imply.io/data/wikipedia.json.gz'],
-        },
-        inputFormat: {
-          type: 'json',
-        },
-      },
-      tuningConfig: {
-        type: 'index_parallel',
-      },
-      dataSchema: {
-        dataSource: 'wikipedia',
-        granularitySpec: {
-          type: 'uniform',
-          segmentGranularity: 'DAY',
-          queryGranularity: 'HOUR',
-        },
-        timestampSpec: {
-          column: 'timestamp',
-          format: 'iso',
-        },
-        dimensionsSpec: {},
-      },
-    },
-  };
-
-  it('spec-utils guessTypeFromSample', () => {
-    expect(guessTypeFromSample([])).toMatchInlineSnapshot(`"string"`);
-  });
-
-  it('spec-utils getColumnTypeFromHeaderAndRows', () => {
-    expect(
-      getColumnTypeFromHeaderAndRows({ header: ['header'], rows: [] }, 'header'),
-    ).toMatchInlineSnapshot(`"string"`);
-  });
-
-  it('spec-utils getDimensionSpecs', () => {
-    expect(getDimensionSpecs({ header: ['header'], rows: [] }, true)).toMatchInlineSnapshot(`
-      Array [
-        "header",
-      ]
-    `);
-  });
-
-  it('spec-utils getMetricSecs', () => {
-    expect(getMetricSpecs({ header: ['header'], rows: [] })).toMatchInlineSnapshot(`
-      Array [
-        Object {
-          "name": "count",
-          "type": "count",
-        },
-      ]
-    `);
-  });
-
-  it('spec-utils updateSchemaWithSample', () => {
-    expect(
-      updateSchemaWithSample(ingestionSpec, { header: ['header'], rows: [] }, 'specific', true),
-    ).toMatchInlineSnapshot(`
-      Object {
-        "spec": Object {
-          "dataSchema": Object {
-            "dataSource": "wikipedia",
-            "dimensionsSpec": Object {
-              "dimensions": Array [
-                "header",
-              ],
-            },
-            "granularitySpec": Object {
-              "queryGranularity": "HOUR",
-              "rollup": true,
-              "segmentGranularity": "DAY",
-              "type": "uniform",
-            },
-            "metricsSpec": Array [
-              Object {
-                "name": "count",
-                "type": "count",
-              },
-            ],
-            "timestampSpec": Object {
-              "column": "timestamp",
-              "format": "iso",
-            },
-          },
-          "ioConfig": Object {
-            "inputFormat": Object {
-              "type": "json",
-            },
-            "inputSource": Object {
-              "type": "http",
-              "uris": Array [
-                "https://static.imply.io/data/wikipedia.json.gz",
-              ],
-            },
-            "type": "index_parallel",
-          },
-          "tuningConfig": Object {
-            "type": "index_parallel",
-          },
-        },
-        "type": "index_parallel",
-      }
-    `);
-  });
-});
-describe('druid-query.ts', () => {
-  it('spec-utils parseHtmlError', () => {
-    expect(parseHtmlError('<div></div>')).toMatchInlineSnapshot(`undefined`);
-  });
-
-  it('spec-utils parseHtmlError', () => {
-    expect(getDruidErrorMessage({})).toMatchInlineSnapshot(`undefined`);
-  });
-
-  it('spec-utils parseQueryPlan', () => {
-    expect(parseQueryPlan('start')).toMatchInlineSnapshot(`"start"`);
-  });
 });
diff --git a/web-console/src/views/datasource-view/__snapshots__/datasource-view.spec.tsx.snap b/web-console/src/views/datasource-view/__snapshots__/datasource-view.spec.tsx.snap
index 64b412d..947e1d2 100755
--- a/web-console/src/views/datasource-view/__snapshots__/datasource-view.spec.tsx.snap
+++ b/web-console/src/views/datasource-view/__snapshots__/datasource-view.spec.tsx.snap
@@ -64,6 +64,7 @@ exports[`data source view matches snapshot 1`] = `
           "Segment load/drop queues",
           "Total data size",
           "Segment size",
+          "Segment granularity",
           "Total rows",
           "Avg. row size",
           "Replicated size",
@@ -196,6 +197,19 @@ exports[`data source view matches snapshot 1`] = `
         Object {
           "Cell": [Function],
           "Header": <React.Fragment>
+            Segment
+            <br />
+            granularity
+          </React.Fragment>,
+          "accessor": [Function],
+          "filterable": false,
+          "id": "segment_granularity",
+          "show": true,
+          "width": 100,
+        },
+        Object {
+          "Cell": [Function],
+          "Header": <React.Fragment>
             Total
             <br />
             rows
diff --git a/web-console/src/views/datasource-view/datasource-view.tsx b/web-console/src/views/datasource-view/datasource-view.tsx
index 9ad70d9..effd3d1 100644
--- a/web-console/src/views/datasource-view/datasource-view.tsx
+++ b/web-console/src/views/datasource-view/datasource-view.tsx
@@ -39,14 +39,20 @@ import {
 } from '../../components';
 import { AsyncActionDialog, CompactionDialog, RetentionDialog } from '../../dialogs';
 import { DatasourceTableActionDialog } from '../../dialogs/datasource-table-action-dialog/datasource-table-action-dialog';
-import { AppToaster } from '../../singletons/toaster';
 import {
-  addFilter,
   CompactionConfig,
   CompactionStatus,
+  formatCompactionConfigAndStatus,
+  zeroCompactionStatus,
+} from '../../druid-models';
+import { AppToaster } from '../../singletons/toaster';
+import {
+  addFilter,
+  Capabilities,
+  CapabilitiesMode,
   countBy,
+  deepGet,
   formatBytes,
-  formatCompactionConfigAndStatus,
   formatInteger,
   formatMillions,
   formatPercent,
@@ -57,13 +63,10 @@ import {
   queryDruidSql,
   QueryManager,
   QueryState,
-  zeroCompactionStatus,
 } from '../../utils';
 import { BasicAction } from '../../utils/basic-action';
-import { Capabilities, CapabilitiesMode } from '../../utils/capabilities';
 import { Rule, RuleUtil } from '../../utils/load-rule';
 import { LocalStorageBackedArray } from '../../utils/local-storage-backed-array';
-import { deepGet } from '../../utils/object-change';
 
 import './datasource-view.scss';
 
@@ -74,6 +77,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
     'Segment load/drop queues',
     'Total data size',
     'Segment size',
+    'Segment granularity',
     'Total rows',
     'Avg. row size',
     'Replicated size',
@@ -100,6 +104,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
     'Segment load/drop queues',
     'Total data size',
     'Segment size',
+    'Segment granularity',
     'Total rows',
     'Avg. row size',
     'Replicated size',
@@ -149,6 +154,11 @@ interface DatasourceQueryResultRow {
   readonly num_available_segments: number;
   readonly num_segments_to_load: number;
   readonly num_segments_to_drop: number;
+  readonly minute_aligned_segments: number;
+  readonly hour_aligned_segments: number;
+  readonly day_aligned_segments: number;
+  readonly month_aligned_segments: number;
+  readonly year_aligned_segments: number;
   readonly total_data_size: number;
   readonly replicated_size: number;
   readonly min_segment_rows: number;
@@ -158,6 +168,17 @@ interface DatasourceQueryResultRow {
   readonly avg_row_size: number;
 }
 
+function segmentGranularityCountsToRank(row: DatasourceQueryResultRow): number {
+  return (
+    Number(Boolean(row.num_segments)) +
+    Number(Boolean(row.minute_aligned_segments)) +
+    Number(Boolean(row.hour_aligned_segments)) +
+    Number(Boolean(row.day_aligned_segments)) +
+    Number(Boolean(row.month_aligned_segments)) +
+    Number(Boolean(row.year_aligned_segments))
+  );
+}
+
 interface Datasource extends DatasourceQueryResultRow {
   readonly rules: Rule[];
   readonly compactionConfig?: CompactionConfig;
@@ -227,6 +248,11 @@ export class DatasourcesView extends React.PureComponent<
   COUNT(*) FILTER (WHERE is_available = 1 AND ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1)) AS num_available_segments,
   COUNT(*) FILTER (WHERE is_published = 1 AND is_overshadowed = 0 AND is_available = 0) AS num_segments_to_load,
   COUNT(*) FILTER (WHERE is_available = 1 AND NOT ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1)) AS num_segments_to_drop,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%:00.000Z' AND "end" LIKE '%:00.000Z') AS minute_aligned_segments,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%:00:00.000Z' AND "end" LIKE '%:00:00.000Z') AS hour_aligned_segments,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%T00:00:00.000Z' AND "end" LIKE '%T00:00:00.000Z') AS day_aligned_segments,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%-01T00:00:00.000Z' AND "end" LIKE '%-01T00:00:00.000Z') AS month_aligned_segments,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%-01-01T00:00:00.000Z' AND "end" LIKE '%-01-01T00:00:00.000Z') AS year_aligned_segments,
   SUM("size") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS total_data_size,
   SUM("size" * "num_replicas") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS replicated_size,
   MIN("num_rows") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS min_segment_rows,
@@ -306,6 +332,11 @@ GROUP BY 1`;
                 num_segments: numSegments,
                 num_segments_to_load: segmentsToLoad,
                 num_segments_to_drop: 0,
+                minute_aligned_segments: -1,
+                hour_aligned_segments: -1,
+                day_aligned_segments: -1,
+                month_aligned_segments: -1,
+                year_aligned_segments: -1,
                 replicated_size: -1,
                 total_data_size: totalDataSize,
                 min_segment_rows: -1,
@@ -1032,6 +1063,37 @@ GROUP BY 1`;
               ),
             },
             {
+              Header: twoLines('Segment', 'granularity'),
+              show: capabilities.hasSql() && hiddenColumns.exists('Segment granularity'),
+              id: 'segment_granularity',
+              accessor: segmentGranularityCountsToRank,
+              filterable: false,
+              width: 100,
+              Cell: ({ original }) => {
+                const segmentGranularities: string[] = [];
+                if (!original.num_segments) return '-';
+                if (original.num_segments - original.minute_aligned_segments) {
+                  segmentGranularities.push('Sub minute');
+                }
+                if (original.minute_aligned_segments - original.hour_aligned_segments) {
+                  segmentGranularities.push('Minute');
+                }
+                if (original.hour_aligned_segments - original.day_aligned_segments) {
+                  segmentGranularities.push('Hour');
+                }
+                if (original.day_aligned_segments - original.month_aligned_segments) {
+                  segmentGranularities.push('Day');
+                }
+                if (original.month_aligned_segments - original.year_aligned_segments) {
+                  segmentGranularities.push('Month');
+                }
+                if (original.year_aligned_segments) {
+                  segmentGranularities.push('Year');
+                }
+                return segmentGranularities.join(', ');
+              },
+            },
+            {
               Header: twoLines('Total', 'rows'),
               show: capabilities.hasSql() && hiddenColumns.exists('Total rows'),
               accessor: 'total_rows',
diff --git a/web-console/src/views/home-view/segments-card/segments-card.tsx b/web-console/src/views/home-view/segments-card/segments-card.tsx
index d84061e..2245aae 100644
--- a/web-console/src/views/home-view/segments-card/segments-card.tsx
+++ b/web-console/src/views/home-view/segments-card/segments-card.tsx
@@ -22,9 +22,7 @@ import { sum } from 'd3-array';
 import React from 'react';
 
 import { useQueryManager } from '../../../hooks';
-import { pluralIfNeeded, queryDruidSql } from '../../../utils';
-import { Capabilities } from '../../../utils/capabilities';
-import { deepGet } from '../../../utils/object-change';
+import { Capabilities, deepGet, pluralIfNeeded, queryDruidSql } from '../../../utils';
 import { HomeViewCard } from '../home-view-card/home-view-card';
 
 export interface SegmentCounts {
diff --git a/web-console/src/views/ingestion-view/ingestion-view.spec.tsx b/web-console/src/views/ingestion-view/ingestion-view.spec.tsx
index fa8c867..ddbaa9d 100644
--- a/web-console/src/views/ingestion-view/ingestion-view.spec.tsx
+++ b/web-console/src/views/ingestion-view/ingestion-view.spec.tsx
@@ -32,7 +32,6 @@ describe('tasks view', () => {
         datasourceId={'datasource'}
         goToDatasource={() => {}}
         goToQuery={() => {}}
-        goToMiddleManager={() => {}}
         goToLoadData={() => {}}
         capabilities={Capabilities.FULL}
       />,
diff --git a/web-console/src/views/ingestion-view/ingestion-view.tsx b/web-console/src/views/ingestion-view/ingestion-view.tsx
index 380c1cf..d4826d8 100644
--- a/web-console/src/views/ingestion-view/ingestion-view.tsx
+++ b/web-console/src/views/ingestion-view/ingestion-view.tsx
@@ -45,11 +45,13 @@ import {
   addFilter,
   addFilterRaw,
   booleanCustomTableFilter,
+  deepGet,
   formatDuration,
   getDruidErrorMessage,
   localStorageGet,
   LocalStorageKeys,
   localStorageSet,
+  oneOf,
   queryDruidSql,
   QueryManager,
   QueryState,
@@ -57,7 +59,6 @@ import {
 import { BasicAction } from '../../utils/basic-action';
 import { Capabilities } from '../../utils/capabilities';
 import { LocalStorageBackedArray } from '../../utils/local-storage-backed-array';
-import { deepGet } from '../../utils/object-change';
 
 import './ingestion-view.scss';
 
@@ -108,7 +109,6 @@ export interface IngestionViewProps {
   openDialog: string | undefined;
   goToDatasource: (datasource: string) => void;
   goToQuery: (initSql: string) => void;
-  goToMiddleManager: (middleManager: string) => void;
   goToLoadData: (supervisorId?: string, taskId?: string) => void;
   capabilities: Capabilities;
 }
@@ -385,7 +385,7 @@ ORDER BY "rank" DESC, "created_time" DESC`;
     const { goToDatasource, goToLoadData } = this.props;
 
     const actions: BasicAction[] = [];
-    if (type === 'kafka' || type === 'kinesis') {
+    if (oneOf(type, 'kafka', 'kinesis')) {
       actions.push(
         {
           icon: IconNames.MULTI_SELECT,
@@ -659,14 +659,14 @@ ORDER BY "rank" DESC, "created_time" DESC`;
         onAction: () => goToDatasource(datasource),
       });
     }
-    if (type === 'index' || type === 'index_parallel') {
+    if (oneOf(type, 'index', 'index_parallel')) {
       actions.push({
         icon: IconNames.CLOUD_UPLOAD,
         title: 'Open in data loader',
         onAction: () => goToLoadData(undefined, id),
       });
     }
-    if (status === 'RUNNING' || status === 'WAITING' || status === 'PENDING') {
+    if (oneOf(status, 'RUNNING', 'WAITING', 'PENDING')) {
       actions.push({
         icon: IconNames.CROSS,
         title: 'Kill',
@@ -704,7 +704,6 @@ ORDER BY "rank" DESC, "created_time" DESC`;
   }
 
   renderTaskTable() {
-    const { goToMiddleManager } = this.props;
     const {
       tasksState,
       taskFilter,
@@ -812,21 +811,12 @@ ORDER BY "rank" DESC, "created_time" DESC`;
               }),
               Cell: row => {
                 if (row.aggregated) return '';
-                const { status, location } = row.original;
-                const locationHostname = location ? location.split(':')[0] : null;
+                const { status } = row.original;
                 const errorMsg = row.original.error_msg;
                 return (
                   <span>
                     <span style={{ color: statusToColor(status) }}>&#x25cf;&nbsp;</span>
                     {status}
-                    {location && (
-                      <a
-                        onClick={() => goToMiddleManager(locationHostname)}
-                        title={`Go to: ${locationHostname}`}
-                      >
-                        &nbsp;&#x279A;
-                      </a>
-                    )}
                     {errorMsg && (
                       <a
                         onClick={() => this.setState({ alertErrorMsg: errorMsg })}
diff --git a/web-console/src/views/load-data-view/filter-table/filter-table.tsx b/web-console/src/views/load-data-view/filter-table/filter-table.tsx
index 89f380f..db0bddc 100644
--- a/web-console/src/views/load-data-view/filter-table/filter-table.tsx
+++ b/web-console/src/views/load-data-view/filter-table/filter-table.tsx
@@ -21,8 +21,8 @@ import React from 'react';
 import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
+import { DruidFilter } from '../../../druid-models';
 import { caseInsensitiveContains, filterMap } from '../../../utils';
-import { DruidFilter } from '../../../utils/ingestion-spec';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './filter-table.scss';
diff --git a/web-console/src/views/load-data-view/load-data-view.scss b/web-console/src/views/load-data-view/load-data-view.scss
index 905352e..bbc8627 100644
--- a/web-console/src/views/load-data-view/load-data-view.scss
+++ b/web-console/src/views/load-data-view/load-data-view.scss
@@ -19,6 +19,12 @@
 @import '~@blueprintjs/core/src/common/colors';
 @import '../../variables';
 
+$control-bar-width: 300px;
+
+$icon-width: 100px;
+$actual-icon-width: 520px;
+$actual-icon-height: 400px;
+
 @mixin sunk-panel {
   background: rgba($dark-gray1, 0.5);
   border-radius: $pt-border-radius;
@@ -30,7 +36,7 @@
   height: 100%;
   display: grid;
   grid-gap: $thin-padding 5px;
-  grid-template-columns: 1fr 280px;
+  grid-template-columns: 1fr $control-bar-width;
   grid-template-rows: 60px 1fr 28px;
   grid-template-areas:
     'navi navi'
@@ -133,7 +139,8 @@
         }
 
         img {
-          width: 100px;
+          width: $icon-width;
+          height: $icon-width * ($actual-icon-height / $actual-icon-width);
           display: inline-block;
         }
       }
@@ -144,7 +151,7 @@
   &.tuning,
   &.publish {
     grid-gap: 20px 40px;
-    grid-template-columns: 1fr 1fr 280px;
+    grid-template-columns: 1fr 1fr $control-bar-width;
     grid-template-areas:
       'navi navi navi'
       'main othr ctrl'
diff --git a/web-console/src/views/load-data-view/load-data-view.tsx b/web-console/src/views/load-data-view/load-data-view.tsx
index 2d5074a..4db8286 100644
--- a/web-console/src/views/load-data-view/load-data-view.tsx
+++ b/web-console/src/views/load-data-view/load-data-view.tsx
@@ -54,53 +54,46 @@ import {
 } from '../../components';
 import { FormGroupWithInfo } from '../../components/form-group-with-info/form-group-with-info';
 import { AsyncActionDialog } from '../../dialogs';
-import { getLink } from '../../links';
-import { AppToaster } from '../../singletons/toaster';
-import { UrlBaser } from '../../singletons/url-baser';
 import {
-  filterMap,
-  getDruidErrorMessage,
-  localStorageGet,
-  LocalStorageKeys,
-  localStorageSet,
-  parseJson,
-  pluralIfNeeded,
-  QueryState,
-} from '../../utils';
-import { NUMERIC_TIME_FORMATS, possibleDruidFormatForValues } from '../../utils/druid-time';
-import { updateSchemaWithSample } from '../../utils/druid-type';
+  addTimestampTransform,
+  CONSTANT_TIMESTAMP_SPEC,
+  CONSTANT_TIMESTAMP_SPEC_FIELDS,
+  DIMENSION_SPEC_FIELDS,
+  FILTER_FIELDS,
+  FLATTEN_FIELD_FIELDS,
+  getTimestampExpressionFields,
+  getTimestampSchema,
+  INPUT_FORMAT_FIELDS,
+  METRIC_SPEC_FIELDS,
+  removeTimestampTransform,
+  TIMESTAMP_SPEC_FIELDS,
+  TimestampSpec,
+  Transform,
+  TRANSFORM_FIELDS,
+  updateSchemaWithSample,
+} from '../../druid-models';
 import {
-  adjustIngestionSpec,
   adjustTuningConfig,
   cleanSpec,
+  computeFlattenPathsForData,
   DimensionMode,
   DimensionSpec,
   DimensionsSpec,
   DruidFilter,
-  EMPTY_ARRAY,
-  EMPTY_OBJECT,
   fillDataSourceNameIfNeeded,
   fillInputFormat,
   FlattenField,
-  getConstantTimestampSpec,
   getDimensionMode,
-  getDimensionSpecFormFields,
-  getFilterFormFields,
-  getFlattenFieldFormFields,
   getIngestionComboType,
   getIngestionDocLink,
   getIngestionImage,
   getIngestionTitle,
-  getInputFormatFormFields,
   getIoConfigFormFields,
   getIoConfigTuningFormFields,
-  getMetricSpecFormFields,
   getPartitionRelatedTuningSpecFormFields,
   getRequiredModule,
   getRollup,
   getSpecType,
-  getTimestampSpecFormFields,
-  getTransformFormFields,
   getTuningSpecFormFields,
   GranularitySpec,
   IngestionComboTypeWithExtra,
@@ -110,7 +103,6 @@ import {
   invalidIoConfig,
   invalidTuningConfig,
   IoConfig,
-  isColumnTimestampSpec,
   isDruidSource,
   isEmptyIngestionSpec,
   issueWithIoConfig,
@@ -119,14 +111,33 @@ import {
   MAX_INLINE_DATA_LENGTH,
   MetricSpec,
   normalizeSpec,
+  NUMERIC_TIME_FORMATS,
+  possibleDruidFormatForValues,
   splitFilter,
-  TimestampSpec,
-  Transform,
   TuningConfig,
   updateIngestionType,
   upgradeSpec,
-} from '../../utils/ingestion-spec';
-import { deepDelete, deepGet, deepSet } from '../../utils/object-change';
+} from '../../druid-models';
+import { getLink } from '../../links';
+import { AppToaster } from '../../singletons/toaster';
+import { UrlBaser } from '../../singletons/url-baser';
+import {
+  deepDelete,
+  deepGet,
+  deepSet,
+  deepSetMulti,
+  EMPTY_ARRAY,
+  EMPTY_OBJECT,
+  filterMap,
+  getDruidErrorMessage,
+  localStorageGet,
+  LocalStorageKeys,
+  localStorageSet,
+  oneOf,
+  parseJson,
+  pluralIfNeeded,
+  QueryState,
+} from '../../utils';
 import {
   CacheRows,
   ExampleManifest,
@@ -146,7 +157,6 @@ import {
   SampleResponseWithExtraInfo,
   SampleStrategy,
 } from '../../utils/sampler';
-import { computeFlattenPathsForData } from '../../utils/spec-utils';
 
 import { ExamplePicker } from './example-picker/example-picker';
 import { FilterTable, filterTableSelectedColumnName } from './filter-table/filter-table';
@@ -187,7 +197,7 @@ function showBlankLine(line: SampleEntry): string {
 }
 
 function getTimestampSpec(headerAndRows: HeaderAndRows | null): TimestampSpec {
-  if (!headerAndRows) return getConstantTimestampSpec();
+  if (!headerAndRows) return CONSTANT_TIMESTAMP_SPEC;
 
   const timestampSpecs = filterMap(headerAndRows.header, sampleHeader => {
     const possibleFormat = possibleDruidFormatForValues(
@@ -204,7 +214,7 @@ function getTimestampSpec(headerAndRows: HeaderAndRows | null): TimestampSpec {
     timestampSpecs.find(ts => /time/i.test(ts.column)) || // Use a suggestion that has time in the name if possible
     timestampSpecs.find(ts => !NUMERIC_TIME_FORMATS.includes(ts.format)) || // Use a suggestion that is not numeric
     timestampSpecs[0] || // Fall back to the first one
-    getConstantTimestampSpec() // Ok, empty it is...
+    CONSTANT_TIMESTAMP_SPEC // Ok, empty it is...
   );
 }
 
@@ -300,7 +310,7 @@ export interface LoadDataViewState {
   // for timestamp
   timestampQueryState: QueryState<{
     headerAndRows: HeaderAndRows;
-    timestampSpec: TimestampSpec;
+    spec: IngestionSpec;
   }>;
 
   // for transform
@@ -454,7 +464,6 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
   private updateSpec = (newSpec: IngestionSpec) => {
     newSpec = normalizeSpec(newSpec);
     newSpec = upgradeSpec(newSpec);
-    newSpec = adjustIngestionSpec(newSpec);
     const deltaState: Partial<LoadDataViewState> = { spec: newSpec, specPreview: newSpec };
     if (!deepGet(newSpec, 'spec.ioConfig.type')) {
       deltaState.cacheRows = undefined;
@@ -470,7 +479,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
   private applyPreviewSpec = () => {
     this.setState(state => {
       localStorageSet(LocalStorageKeys.INGESTION_SPEC, JSON.stringify(state.specPreview));
-      return { spec: state.specPreview };
+      return { spec: Object.assign({}, state.specPreview) };
     });
   };
 
@@ -577,14 +586,15 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     );
   }
 
-  renderApplyButtonBar() {
+  renderApplyButtonBar(queryState: QueryState<unknown>) {
     const previewSpecSame = this.isPreviewSpecSame();
+    const queryStateHasError = Boolean(queryState && queryState.error);
 
     return (
       <FormGroup className="control-buttons">
         <Button
           text="Apply"
-          disabled={previewSpecSame}
+          disabled={previewSpecSame && !queryStateHasError}
           intent={Intent.PRIMARY}
           onClick={this.applyPreviewSpec}
         />
@@ -1047,7 +1057,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForConnect(spec, sampleStrategy);
     } catch (e) {
       this.setState({
-        inputQueryState: new QueryState({ error: e.message }),
+        inputQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1091,7 +1101,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (inputQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (inputQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${inputQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${inputQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (inputQueryState.data) {
       const inputData = inputQueryState.data.data;
       mainFill = (
@@ -1168,7 +1178,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
               </Callout>
             </FormGroup>
           )}
-          {(specType === 'kafka' || specType === 'kinesis') && (
+          {oneOf(specType, 'kafka', 'kinesis') && (
             <FormGroup label="Where should the data be sampled from?">
               <HTMLSelect
                 value={sampleStrategy}
@@ -1179,7 +1189,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
               </HTMLSelect>
             </FormGroup>
           )}
-          {this.renderApplyButtonBar()}
+          {this.renderApplyButtonBar(inputQueryState)}
         </div>
         {this.renderNextBar({
           disabled: !inputQueryState.data,
@@ -1278,7 +1288,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForParser(spec, sampleStrategy);
     } catch (e) {
       this.setState({
-        parserQueryState: new QueryState({ error: e.message }),
+        parserQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1315,7 +1325,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (parserQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (parserQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${parserQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${parserQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (parserQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -1380,13 +1390,13 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           {!selectedFlattenField && (
             <>
               <AutoForm
-                fields={getInputFormatFormFields()}
+                fields={INPUT_FORMAT_FIELDS}
                 model={inputFormat}
                 onChange={p =>
                   this.updateSpecPreview(deepSet(spec, 'spec.ioConfig.inputFormat', p))
                 }
               />
-              {this.renderApplyButtonBar()}
+              {this.renderApplyButtonBar(parserQueryState)}
             </>
           )}
           {this.renderFlattenControls()}
@@ -1461,7 +1471,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       return (
         <div className="edit-controls">
           <AutoForm
-            fields={getFlattenFieldFormFields()}
+            fields={FLATTEN_FIELD_FIELDS}
             model={selectedFlattenField}
             onChange={f => this.setState({ selectedFlattenField: f })}
           />
@@ -1529,7 +1539,6 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     const { spec, cacheRows } = this.state;
     const inputFormatColumns: string[] =
       deepGet(spec, 'spec.ioConfig.inputFormat.columns') || EMPTY_ARRAY;
-    const timestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec') || EMPTY_OBJECT;
 
     if (!cacheRows) {
       this.setState({
@@ -1549,7 +1558,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForTimestamp(spec, cacheRows);
     } catch (e) {
       this.setState({
-        timestampQueryState: new QueryState({ error: e.message }),
+        timestampQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1562,7 +1571,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
             undefined,
             ['__time'].concat(inputFormatColumns),
           ),
-          timestampSpec,
+          spec,
         },
       }),
     });
@@ -1570,9 +1579,11 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
 
   renderTimestampStep() {
     const { specPreview: spec, columnFilter, specialColumnsOnly, timestampQueryState } = this.state;
+    const timestampSchema = getTimestampSchema(spec);
     const timestampSpec: TimestampSpec =
       deepGet(spec, 'spec.dataSchema.timestampSpec') || EMPTY_OBJECT;
-    const timestampSpecFromColumn = isColumnTimestampSpec(timestampSpec);
+    const transforms: Transform[] =
+      deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;
 
     let mainFill: JSX.Element | string = '';
     if (timestampQueryState.isInit()) {
@@ -1585,7 +1596,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (timestampQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (timestampQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${timestampQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${timestampQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (timestampQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -1622,46 +1633,88 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           <Callout className="intro">
             <p>
               Druid partitions data based on the primary time column of your data. This column is
-              stored internally in Druid as <Code>__time</Code>. Please specify the primary time
-              column. If you do not have any time columns, you can choose "Constant value" to create
-              a default one.
+              stored internally in Druid as <Code>__time</Code>.
+            </p>
+            <p>Configure how to define the time column for this data.</p>
+            <p>
+              If your data does not have a time column, you can select "None" to use a placeholder
+              value. If the time information is spread across multiple columns you can combine them
+              into one by selecting "Expression" and defining a transform expression.
             </p>
             <LearnMore href={`${getLink('DOCS')}/ingestion/index.html#timestampspec`} />
           </Callout>
-          <FormGroup label="Timestamp spec">
+          <FormGroup label="Parse timestamp from">
             <ButtonGroup>
               <Button
-                text="From column"
-                active={timestampSpecFromColumn}
+                text="None"
+                active={timestampSchema === 'none'}
+                onClick={() => {
+                  this.updateSpecPreview(
+                    deepSetMulti(spec, {
+                      'spec.dataSchema.timestampSpec': CONSTANT_TIMESTAMP_SPEC,
+                      'spec.dataSchema.transformSpec.transforms': removeTimestampTransform(
+                        transforms,
+                      ),
+                    }),
+                  );
+                }}
+              />
+              <Button
+                text="Column"
+                active={timestampSchema === 'column'}
                 onClick={() => {
                   const timestampSpec = {
                     column: 'timestamp',
                     format: 'auto',
                   };
                   this.updateSpecPreview(
-                    deepSet(spec, 'spec.dataSchema.timestampSpec', timestampSpec),
+                    deepSetMulti(spec, {
+                      'spec.dataSchema.timestampSpec': timestampSpec,
+                      'spec.dataSchema.transformSpec.transforms': removeTimestampTransform(
+                        transforms,
+                      ),
+                    }),
                   );
                 }}
               />
               <Button
-                text="Constant value"
-                active={!timestampSpecFromColumn}
+                text="Expression"
+                active={timestampSchema === 'expression'}
                 onClick={() => {
                   this.updateSpecPreview(
-                    deepSet(spec, 'spec.dataSchema.timestampSpec', getConstantTimestampSpec()),
+                    deepSetMulti(spec, {
+                      'spec.dataSchema.timestampSpec': CONSTANT_TIMESTAMP_SPEC,
+                      'spec.dataSchema.transformSpec.transforms': addTimestampTransform(transforms),
+                    }),
                   );
                 }}
               />
             </ButtonGroup>
           </FormGroup>
-          <AutoForm
-            fields={getTimestampSpecFormFields(timestampSpec)}
-            model={timestampSpec}
-            onChange={timestampSpec => {
-              this.updateSpecPreview(deepSet(spec, 'spec.dataSchema.timestampSpec', timestampSpec));
-            }}
-          />
-          {this.renderApplyButtonBar()}
+          {timestampSchema === 'expression' ? (
+            <AutoForm
+              fields={getTimestampExpressionFields(transforms)}
+              model={transforms}
+              onChange={transforms => {
+                this.updateSpecPreview(
+                  deepSet(spec, 'spec.dataSchema.transformSpec.transforms', transforms),
+                );
+              }}
+            />
+          ) : (
+            <AutoForm
+              fields={
+                timestampSchema === 'none' ? CONSTANT_TIMESTAMP_SPEC_FIELDS : TIMESTAMP_SPEC_FIELDS
+              }
+              model={timestampSpec}
+              onChange={timestampSpec => {
+                this.updateSpecPreview(
+                  deepSet(spec, 'spec.dataSchema.timestampSpec', timestampSpec),
+                );
+              }}
+            />
+          )}
+          {this.renderApplyButtonBar(timestampQueryState)}
         </div>
         {this.renderNextBar({
           disabled: !timestampQueryState.data,
@@ -1700,7 +1753,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForTransform(spec, cacheRows);
     } catch (e) {
       this.setState({
-        transformQueryState: new QueryState({ error: e.message }),
+        transformQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1734,7 +1787,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (transformQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (transformQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${transformQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${transformQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (transformQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -1834,7 +1887,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       return (
         <div className="edit-controls">
           <AutoForm
-            fields={getTransformFormFields()}
+            fields={TRANSFORM_FIELDS}
             model={selectedTransform}
             onChange={selectedTransform => this.setState({ selectedTransform })}
           />
@@ -1915,7 +1968,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForFilter(spec, cacheRows);
     } catch (e) {
       this.setState({
-        filterQueryState: new QueryState({ error: e.message }),
+        filterQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1941,7 +1994,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponseNoFilter = await sampleForFilter(specNoFilter, cacheRows);
     } catch (e) {
       this.setState({
-        filterQueryState: new QueryState({ error: e.message }),
+        filterQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1976,7 +2029,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (filterQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (filterQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${filterQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${filterQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (filterQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -2048,10 +2101,10 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       return (
         <div className="edit-controls">
           <AutoForm
-            fields={getFilterFormFields()}
+            fields={FILTER_FIELDS}
             model={selectedFilter}
             onChange={f => this.setState({ selectedFilter: f })}
-            showCustom={f => !['selector', 'in', 'regex', 'like', 'not'].includes(f.type)}
+            showCustom={f => !oneOf(f.type, 'selector', 'in', 'regex', 'like', 'not')}
           />
           <div className="control-buttons">
             <Button
@@ -2122,12 +2175,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
                 label: 'Time intervals',
                 type: 'string-array',
                 placeholder: 'ex: 2018-01-01/2018-06-01',
-                info: (
-                  <>
-                    A comma separated list of intervals for the raw data being ingested. Ignored for
-                    real-time ingestion.
-                  </>
-                ),
+                info: <>A comma separated list of intervals for the raw data being ingested.</>,
               },
             ]}
             model={spec}
@@ -2202,7 +2250,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForSchema(spec, cacheRows);
     } catch (e) {
       this.setState({
-        schemaQueryState: new QueryState({ error: e.message }),
+        schemaQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -2242,7 +2290,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (schemaQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (schemaQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${schemaQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${schemaQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (schemaQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -2362,7 +2410,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
                 <Switch
                   checked={rollup}
                   onChange={() => this.setState({ newRollup: !rollup })}
-                  labelElement="Rollup"
+                  label="Rollup"
                 />
               </FormGroupWithInfo>
               <AutoForm
@@ -2394,6 +2442,17 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
         </div>
         {this.renderNextBar({
           disabled: !schemaQueryState.data,
+          onNextStep: () => {
+            let newSpec = spec;
+            if (rollup) {
+              newSpec = deepSet(newSpec, 'spec.tuningConfig.partitionsSpec', { type: 'hashed' });
+              newSpec = deepSet(newSpec, 'spec.tuningConfig.forceGuaranteedRollup', true);
+            } else {
+              newSpec = deepSet(newSpec, 'spec.tuningConfig.partitionsSpec', { type: 'dynamic' });
+              newSpec = deepDelete(newSpec, 'spec.tuningConfig.forceGuaranteedRollup');
+            }
+            this.updateSpec(newSpec);
+          },
         })}
       </>
     );
@@ -2544,7 +2603,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       return (
         <div className="edit-controls">
           <AutoForm
-            fields={getDimensionSpecFormFields()}
+            fields={DIMENSION_SPEC_FIELDS}
             model={selectedDimensionSpec}
             onChange={selectedDimensionSpec => this.setState({ selectedDimensionSpec })}
           />
@@ -2667,7 +2726,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       return (
         <div className="edit-controls">
           <AutoForm
-            fields={getMetricSpecFormFields()}
+            fields={METRIC_SPEC_FIELDS}
             model={selectedMetricSpec}
             onChange={selectedMetricSpec => this.setState({ selectedMetricSpec })}
           />
@@ -2742,6 +2801,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     const tuningConfig: TuningConfig = deepGet(spec, 'spec.tuningConfig') || EMPTY_OBJECT;
     const granularitySpec: GranularitySpec =
       deepGet(spec, 'spec.dataSchema.granularitySpec') || EMPTY_OBJECT;
+    const isStreaming = oneOf(spec.type, 'kafka', 'kinesis');
 
     return (
       <>
@@ -2774,25 +2834,25 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
             model={granularitySpec}
             onChange={g => this.updateSpec(deepSet(spec, 'spec.dataSchema.granularitySpec', g))}
           />
-          <AutoForm
-            fields={[
-              {
-                name: 'spec.dataSchema.granularitySpec.intervals',
-                label: 'Time intervals',
-                type: 'string-array',
-                placeholder: 'ex: 2018-01-01/2018-06-01',
-                required: spec => Boolean(deepGet(spec, 'spec.tuningConfig.forceGuaranteedRollup')),
-                info: (
-                  <>
-                    A comma separated list of intervals for the raw data being ingested. Ignored for
-                    real-time ingestion.
-                  </>
-                ),
-              },
-            ]}
-            model={spec}
-            onChange={s => this.updateSpec(s)}
-          />
+          {!isStreaming && (
+            <AutoForm
+              fields={[
+                {
+                  name: 'spec.dataSchema.granularitySpec.intervals',
+                  label: 'Time intervals',
+                  type: 'string-array',
+                  placeholder: 'ex: 2018-01-01/2018-06-01',
+                  required: spec =>
+                    ['hashed', 'single_dim'].includes(
+                      deepGet(spec, 'spec.tuningConfig.partitionsSpec.type'),
+                    ),
+                  info: <>A comma separated list of intervals for the raw data being ingested.</>,
+                },
+              ]}
+              model={spec}
+              onChange={s => this.updateSpec(s)}
+            />
+          )}
         </div>
         <div className="other">
           <H5>Secondary partitioning</H5>
@@ -2904,7 +2964,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
                 label: 'Append to existing',
                 type: 'boolean',
                 defaultValue: false,
-                defined: spec => !deepGet(spec, 'spec.tuningConfig.forceGuaranteedRollup'),
+                defined: spec =>
+                  deepGet(spec, 'spec.tuningConfig.partitionsSpec.type') === 'dynamic',
                 info: (
                   <>
                     Creates segments as additional shards of the latest version, effectively
diff --git a/web-console/src/views/load-data-view/parse-data-table/parse-data-table.tsx b/web-console/src/views/load-data-view/parse-data-table/parse-data-table.tsx
index 6a6975d..e76f3ce 100644
--- a/web-console/src/views/load-data-view/parse-data-table/parse-data-table.tsx
+++ b/web-console/src/views/load-data-view/parse-data-table/parse-data-table.tsx
@@ -22,8 +22,8 @@ import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
 import { TableCellUnparseable } from '../../../components/table-cell-unparseable/table-cell-unparseable';
+import { FlattenField } from '../../../druid-models';
 import { caseInsensitiveContains, filterMap } from '../../../utils';
-import { FlattenField } from '../../../utils/ingestion-spec';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './parse-data-table.scss';
diff --git a/web-console/src/views/load-data-view/parse-time-table/parse-time-table.spec.tsx b/web-console/src/views/load-data-view/parse-time-table/parse-time-table.spec.tsx
index 13b82fc..f748c05 100644
--- a/web-console/src/views/load-data-view/parse-time-table/parse-time-table.spec.tsx
+++ b/web-console/src/views/load-data-view/parse-time-table/parse-time-table.spec.tsx
@@ -19,7 +19,8 @@
 import { render } from '@testing-library/react';
 import React from 'react';
 
-import { getDummyTimestampSpec } from '../../../utils/ingestion-spec';
+import { IngestionSpec, PLACEHOLDER_TIMESTAMP_SPEC } from '../../../druid-models';
+import { deepSet } from '../../../utils';
 
 import { ParseTimeTable } from './parse-time-table';
 
@@ -35,11 +36,17 @@ describe('parse time table', () => {
       ],
     };
 
+    const spec = deepSet(
+      {} as IngestionSpec,
+      'spec.dataSchema.timestampSpec',
+      PLACEHOLDER_TIMESTAMP_SPEC,
+    );
+
     const parseTimeTable = (
       <ParseTimeTable
         sampleBundle={{
           headerAndRows: sampleData,
-          timestampSpec: getDummyTimestampSpec(),
+          spec,
         }}
         columnFilter=""
         possibleTimestampColumnsOnly={false}
diff --git a/web-console/src/views/load-data-view/parse-time-table/parse-time-table.tsx b/web-console/src/views/load-data-view/parse-time-table/parse-time-table.tsx
index 6a5e12f..9289cba 100644
--- a/web-console/src/views/load-data-view/parse-time-table/parse-time-table.tsx
+++ b/web-console/src/views/load-data-view/parse-time-table/parse-time-table.tsx
@@ -22,13 +22,14 @@ import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
 import { TableCellUnparseable } from '../../../components/table-cell-unparseable/table-cell-unparseable';
-import { caseInsensitiveContains, filterMap } from '../../../utils';
-import { possibleDruidFormatForValues } from '../../../utils/druid-time';
 import {
-  getTimestampSpecColumn,
-  isColumnTimestampSpec,
+  getTimestampDetailFromSpec,
+  getTimestampSpecColumnFromSpec,
+  IngestionSpec,
+  possibleDruidFormatForValues,
   TimestampSpec,
-} from '../../../utils/ingestion-spec';
+} from '../../../druid-models';
+import { caseInsensitiveContains, filterMap } from '../../../utils';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './parse-time-table.scss';
@@ -46,7 +47,7 @@ export function parseTimeTableSelectedColumnName(
 export interface ParseTimeTableProps {
   sampleBundle: {
     headerAndRows: HeaderAndRows;
-    timestampSpec: TimestampSpec;
+    spec: IngestionSpec;
   };
   columnFilter: string;
   possibleTimestampColumnsOnly: boolean;
@@ -62,9 +63,9 @@ export const ParseTimeTable = React.memo(function ParseTimeTable(props: ParseTim
     selectedColumnName,
     onTimestampColumnSelect,
   } = props;
-  const { headerAndRows, timestampSpec } = sampleBundle;
-  const timestampSpecColumn = getTimestampSpecColumn(timestampSpec);
-  const timestampSpecFromColumn = isColumnTimestampSpec(timestampSpec);
+  const { headerAndRows, spec } = sampleBundle;
+  const timestampSpecColumn = getTimestampSpecColumnFromSpec(spec);
+  const timestampDetail = getTimestampDetailFromSpec(spec);
 
   return (
     <ReactTable
@@ -73,27 +74,27 @@ export const ParseTimeTable = React.memo(function ParseTimeTable(props: ParseTim
       columns={filterMap(
         headerAndRows.header.length ? headerAndRows.header : ['__error__'],
         (columnName, i) => {
-          const timestamp = columnName === '__time';
-          if (!timestamp && !caseInsensitiveContains(columnName, columnFilter)) return;
-          const used = timestampSpec.column === columnName;
-          const possibleFormat = timestamp
+          const isTimestamp = columnName === '__time';
+          if (!isTimestamp && !caseInsensitiveContains(columnName, columnFilter)) return;
+          const used = timestampSpecColumn === columnName;
+          const possibleFormat = isTimestamp
             ? null
             : possibleDruidFormatForValues(
                 filterMap(headerAndRows.rows, d => (d.parsed ? d.parsed[columnName] : undefined)),
               );
-          if (possibleTimestampColumnsOnly && !timestamp && !possibleFormat) return;
+          if (possibleTimestampColumnsOnly && !isTimestamp && !possibleFormat) return;
 
           const columnClassName = classNames({
-            timestamp,
+            timestamp: isTimestamp,
             used,
             selected: selectedColumnName === columnName,
           });
           return {
             Header: (
               <div
-                className={classNames({ clickable: !timestamp })}
+                className={classNames({ clickable: !isTimestamp })}
                 onClick={
-                  timestamp
+                  isTimestamp
                     ? undefined
                     : () => {
                         onTimestampColumnSelect({
@@ -105,11 +106,7 @@ export const ParseTimeTable = React.memo(function ParseTimeTable(props: ParseTim
               >
                 <div className="column-name">{columnName}</div>
                 <div className="column-detail">
-                  {timestamp
-                    ? timestampSpecFromColumn
-                      ? `from: '${timestampSpecColumn}'`
-                      : `mv: ${timestampSpec.missingValue}`
-                    : possibleFormat || ''}
+                  {isTimestamp ? timestampDetail : possibleFormat || ''}
                   &nbsp;
                 </div>
               </div>
@@ -123,12 +120,12 @@ export const ParseTimeTable = React.memo(function ParseTimeTable(props: ParseTim
                 return <TableCell value={row.original.error} />;
               }
               if (row.original.unparseable) {
-                return <TableCellUnparseable timestamp={timestamp} />;
+                return <TableCellUnparseable timestamp={isTimestamp} />;
               }
-              return <TableCell value={timestamp ? new Date(row.value) : row.value} />;
+              return <TableCell value={isTimestamp ? new Date(row.value) : row.value} />;
             },
-            minWidth: timestamp ? 200 : 100,
-            resizable: !timestamp,
+            minWidth: isTimestamp ? 200 : 100,
+            resizable: !isTimestamp,
           };
         },
       )}
diff --git a/web-console/src/views/load-data-view/schema-table/schema-table.tsx b/web-console/src/views/load-data-view/schema-table/schema-table.tsx
index c334d8a..ecb50e5 100644
--- a/web-console/src/views/load-data-view/schema-table/schema-table.tsx
+++ b/web-console/src/views/load-data-view/schema-table/schema-table.tsx
@@ -21,7 +21,6 @@ import React from 'react';
 import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
-import { caseInsensitiveContains, filterMap, sortWithPrefixSuffix } from '../../../utils';
 import {
   DimensionSpec,
   DimensionsSpec,
@@ -30,7 +29,8 @@ import {
   getMetricSpecName,
   inflateDimensionSpec,
   MetricSpec,
-} from '../../../utils/ingestion-spec';
+} from '../../../druid-models';
+import { caseInsensitiveContains, filterMap, sortWithPrefixSuffix } from '../../../utils';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './schema-table.scss';
@@ -99,7 +99,7 @@ export const SchemaTable = React.memo(function SchemaTable(props: SchemaTablePro
             className: columnClassName,
             id: String(i),
             accessor: (row: SampleEntry) => (row.parsed ? row.parsed[columnName] : null),
-            Cell: row => <TableCell value={row.value} />,
+            Cell: ({ value }) => <TableCell value={value} />,
           };
         } else {
           const timestamp = columnName === '__time';
diff --git a/web-console/src/views/load-data-view/transform-table/transform-table.tsx b/web-console/src/views/load-data-view/transform-table/transform-table.tsx
index 76dc1bf..eb7c1bc 100644
--- a/web-console/src/views/load-data-view/transform-table/transform-table.tsx
+++ b/web-console/src/views/load-data-view/transform-table/transform-table.tsx
@@ -21,9 +21,9 @@ import React from 'react';
 import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
+import { Transform } from '../../../druid-models';
 import { caseInsensitiveContains, filterMap } from '../../../utils';
 import { escapeColumnName } from '../../../utils/druid-expression';
-import { Transform } from '../../../utils/ingestion-spec';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './transform-table.scss';
diff --git a/web-console/src/views/lookups-view/lookups-view.tsx b/web-console/src/views/lookups-view/lookups-view.tsx
index 3666263..1b2218c 100644
--- a/web-console/src/views/lookups-view/lookups-view.tsx
+++ b/web-console/src/views/lookups-view/lookups-view.tsx
@@ -32,8 +32,8 @@ import {
   ViewControlBar,
 } from '../../components';
 import { AsyncActionDialog, LookupEditDialog } from '../../dialogs/';
-import { LookupSpec } from '../../dialogs/lookup-edit-dialog/lookup-edit-dialog';
 import { LookupTableActionDialog } from '../../dialogs/lookup-table-action-dialog/lookup-table-action-dialog';
+import { LookupSpec } from '../../druid-models';
 import { AppToaster } from '../../singletons/toaster';
 import {
   getDruidErrorMessage,
@@ -167,7 +167,7 @@ export class LookupsView extends React.PureComponent<LookupsViewProps, LookupsVi
     const lookupEntriesAndTiers = lookupEntriesAndTiersState.data;
     if (!lookupEntriesAndTiers) return;
 
-    const target: any = lookupEntriesAndTiers.lookupEntries.find((lookupEntry: any) => {
+    const target: any = lookupEntriesAndTiers.lookupEntries.find(lookupEntry => {
       return lookupEntry.tier === tier && lookupEntry.id === id;
     });
     if (id === '') {
@@ -179,7 +179,7 @@ export class LookupsView extends React.PureComponent<LookupsViewProps, LookupsVi
           lookupEdit: {
             name: '',
             tier: loadingEntriesAndTiers ? loadingEntriesAndTiers.tiers[0] : '',
-            spec: { type: '' },
+            spec: { type: 'map', map: {} },
             version: new Date().toISOString(),
           },
         };
diff --git a/web-console/src/views/query-view/column-tree/column-tree.tsx b/web-console/src/views/query-view/column-tree/column-tree.tsx
index e0f17cc..697b90e 100644
--- a/web-console/src/views/query-view/column-tree/column-tree.tsx
+++ b/web-console/src/views/query-view/column-tree/column-tree.tsx
@@ -31,8 +31,7 @@ import React, { ChangeEvent } from 'react';
 
 import { Loader } from '../../../components';
 import { Deferred } from '../../../components/deferred/deferred';
-import { copyAndAlert, groupBy, prettyPrintSql } from '../../../utils';
-import { ColumnMetadata } from '../../../utils/column-metadata';
+import { ColumnMetadata, copyAndAlert, groupBy, oneOf, prettyPrintSql } from '../../../utils';
 import { dataTypeToIcon } from '../query-utils';
 
 import { NumberMenuItems, StringMenuItems, TimeMenuItems } from './column-tree-menu';
@@ -340,8 +339,7 @@ export class ColumnTree extends React.PureComponent<ColumnTreeProps, ColumnTreeS
                                     }}
                                   />
                                   {parsedQuery &&
-                                    (columnData.DATA_TYPE === 'BIGINT' ||
-                                      columnData.DATA_TYPE === 'FLOAT') && (
+                                    oneOf(columnData.DATA_TYPE, 'BIGINT', 'FLOAT') && (
                                       <NumberMenuItems
                                         table={tableName}
                                         schema={schemaName}
diff --git a/web-console/src/views/query-view/query-output/query-output.tsx b/web-console/src/views/query-view/query-output/query-output.tsx
index 6ff5055..ecd714a 100644
--- a/web-console/src/views/query-view/query-output/query-output.tsx
+++ b/web-console/src/views/query-view/query-output/query-output.tsx
@@ -31,9 +31,8 @@ import ReactTable from 'react-table';
 
 import { BracedText, TableCell } from '../../../components';
 import { ShowValueDialog } from '../../../dialogs/show-value-dialog/show-value-dialog';
-import { copyAndAlert, filterMap, prettyPrintSql } from '../../../utils';
+import { copyAndAlert, deepSet, filterMap, prettyPrintSql } from '../../../utils';
 import { BasicAction, basicActionsToMenu } from '../../../utils/basic-action';
-import { deepSet } from '../../../utils/object-change';
 
 import { ColumnRenameInput } from './column-rename-input/column-rename-input';
 
diff --git a/web-console/src/views/segments-view/__snapshots__/segments-view.spec.tsx.snap b/web-console/src/views/segments-view/__snapshots__/segments-view.spec.tsx.snap
index a0af868..80d1467 100755
--- a/web-console/src/views/segments-view/__snapshots__/segments-view.spec.tsx.snap
+++ b/web-console/src/views/segments-view/__snapshots__/segments-view.spec.tsx.snap
@@ -48,6 +48,8 @@ exports[`segments-view matches snapshot 1`] = `
             "Start",
             "End",
             "Version",
+            "Time span",
+            "Partitioning",
             "Partition",
             "Size",
             "Num rows",
@@ -164,6 +166,22 @@ exports[`segments-view matches snapshot 1`] = `
             "width": 120,
           },
           Object {
+            "Cell": [Function],
+            "Header": "Time span",
+            "accessor": "time_span",
+            "filterable": true,
+            "show": true,
+            "width": 100,
+          },
+          Object {
+            "Cell": [Function],
+            "Header": "Partitioning",
+            "accessor": "partitioning",
+            "filterable": true,
+            "show": true,
+            "width": 100,
+          },
+          Object {
             "Header": "Partition",
             "accessor": "partition_num",
             "filterable": false,
diff --git a/web-console/src/views/segments-view/segments-view.tsx b/web-console/src/views/segments-view/segments-view.tsx
index f9585cd..4d2f649 100644
--- a/web-console/src/views/segments-view/segments-view.tsx
+++ b/web-console/src/views/segments-view/segments-view.tsx
@@ -19,6 +19,7 @@
 import { Button, ButtonGroup, Intent, Label, MenuItem } from '@blueprintjs/core';
 import { IconNames } from '@blueprintjs/icons';
 import axios from 'axios';
+import { SqlExpression, SqlRef } from 'druid-query-toolkit';
 import React from 'react';
 import ReactTable, { Filter } from 'react-table';
 
@@ -38,6 +39,7 @@ import { SegmentTableActionDialog } from '../../dialogs/segments-table-action-di
 import {
   addFilter,
   compact,
+  deepGet,
   filterMap,
   formatBytes,
   formatInteger,
@@ -61,6 +63,8 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
     'Start',
     'End',
     'Version',
+    'Time span',
+    'Partitioning',
     'Partition',
     'Size',
     'Num rows',
@@ -87,6 +91,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
     'Start',
     'End',
     'Version',
+    'Partitioning',
     'Partition',
     'Size',
     'Num rows',
@@ -127,7 +132,9 @@ interface SegmentQueryResultRow {
   end: string;
   segment_id: string;
   version: string;
-  size: 0;
+  time_span: string;
+  partitioning: string;
+  size: number;
   partition_num: number;
   num_rows: number;
   num_replicas: number;
@@ -153,6 +160,31 @@ export interface SegmentsViewState {
 export class SegmentsView extends React.PureComponent<SegmentsViewProps, SegmentsViewState> {
   static PAGE_SIZE = 25;
 
+  static WITH_QUERY = `WITH s AS (
+  SELECT
+    "segment_id", "datasource", "start", "end", "size", "version",
+    CASE
+      WHEN "start" LIKE '%-01-01T00:00:00.000Z' AND "end" LIKE '%-01-01T00:00:00.000Z' THEN 'Year'
+      WHEN "start" LIKE '%-01T00:00:00.000Z' AND "end" LIKE '%-01T00:00:00.000Z' THEN 'Month'
+      WHEN "start" LIKE '%T00:00:00.000Z' AND "end" LIKE '%T00:00:00.000Z' THEN 'Day'
+      WHEN "start" LIKE '%:00:00.000Z' AND "end" LIKE '%:00:00.000Z' THEN 'Hour'
+      WHEN "start" LIKE '%:00.000Z' AND "end" LIKE '%:00.000Z' THEN 'Minute'
+      ELSE 'Sub minute'
+    END AS "time_span",
+    CASE
+      WHEN "shard_spec" LIKE '%"type":"numbered"%' THEN 'dynamic'
+      WHEN "shard_spec" LIKE '%"type":"hashed"%' THEN 'hashed'
+      WHEN "shard_spec" LIKE '%"type":"single"%' THEN 'single_dim'
+      WHEN "shard_spec" LIKE '%"type":"none"%' THEN 'none'
+      WHEN "shard_spec" LIKE '%"type":"linear"%' THEN 'linear'
+      WHEN "shard_spec" LIKE '%"type":"numbered_overwrite"%' THEN 'numbered_overwrite'
+      ELSE '-'
+    END AS "partitioning",
+    "partition_num", "num_replicas", "num_rows",
+    "is_published", "is_available", "is_realtime", "is_overshadowed"
+  FROM sys.segments
+)`;
+
   private segmentsSqlQueryManager: QueryManager<SegmentsQuery, SegmentQueryResultRow[]>;
   private segmentsNoSqlQueryManager: QueryManager<null, SegmentQueryResultRow[]>;
 
@@ -178,12 +210,10 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
     this.segmentsSqlQueryManager = new QueryManager({
       debounceIdle: 500,
       processQuery: async (query: SegmentsQuery, _cancelToken, setIntermediateQuery) => {
-        const totalQuerySize = (query.page + 1) * query.pageSize;
-
         const whereParts = filterMap(query.filtered, (f: Filter) => {
           if (f.id.startsWith('is_')) {
             if (f.value === 'all') return;
-            return `${JSON.stringify(f.id)} = ${f.value === 'true' ? 1 : 0}`;
+            return SqlRef.columnWithQuotes(f.id).equal(f.value === 'true' ? 1 : 0);
           } else {
             return sqlQueryCustomTableFilter(f);
           }
@@ -193,17 +223,18 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
 
         let whereClause = '';
         if (whereParts.length) {
-          whereClause = whereParts.join(' AND ');
+          whereClause = SqlExpression.and(...whereParts).toString();
         }
 
         if (query.groupByInterval) {
           const innerQuery = compact([
             `SELECT "start" || '/' || "end" AS "interval"`,
             `FROM sys.segments`,
-            whereClause ? `WHERE ${whereClause}` : '',
+            whereClause ? `WHERE ${whereClause}` : undefined,
             `GROUP BY 1`,
             `ORDER BY 1 DESC`,
-            `LIMIT ${totalQuerySize}`,
+            `LIMIT ${query.pageSize}`,
+            query.page ? `OFFSET ${query.page * query.pageSize}` : undefined,
           ]).join('\n');
 
           const intervals: string = (await queryDruidSql({ query: innerQuery }))
@@ -211,10 +242,9 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             .join(', ');
 
           queryParts = compact([
-            `SELECT`,
-            `  ("start" || '/' || "end") AS "interval",`,
-            `  "segment_id", "datasource", "start", "end", "size", "version", "partition_num", "num_replicas", "num_rows", "is_published", "is_available", "is_realtime", "is_overshadowed"`,
-            `FROM sys.segments`,
+            SegmentsView.WITH_QUERY,
+            `SELECT "start" || '/' || "end" AS "interval", *`,
+            `FROM s`,
             `WHERE`,
             intervals ? `  ("start" || '/' || "end") IN (${intervals})` : 'FALSE',
             whereClause ? `  AND ${whereClause}` : '',
@@ -229,12 +259,9 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             );
           }
 
-          queryParts.push(`LIMIT ${totalQuerySize * 1000}`);
+          queryParts.push(`LIMIT ${query.pageSize * 1000}`);
         } else {
-          queryParts = [
-            `SELECT "segment_id", "datasource", "start", "end", "size", "version", "partition_num", "num_replicas", "num_rows", "is_published", "is_available", "is_realtime", "is_overshadowed"`,
-            `FROM sys.segments`,
-          ];
+          queryParts = [SegmentsView.WITH_QUERY, `SELECT *`, `FROM s`];
 
           if (whereClause) {
             queryParts.push(`WHERE ${whereClause}`);
@@ -249,11 +276,15 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             );
           }
 
-          queryParts.push(`LIMIT ${totalQuerySize}`);
+          queryParts.push(`LIMIT ${query.pageSize}`);
+
+          if (query.page) {
+            queryParts.push(`OFFSET ${query.page * query.pageSize}`);
+          }
         }
         const sqlQuery = queryParts.join('\n');
         setIntermediateQuery(sqlQuery);
-        return (await queryDruidSql({ query: sqlQuery })).slice(query.page * query.pageSize);
+        return await queryDruidSql({ query: sqlQuery });
       },
       onStateChange: segmentsState => {
         this.setState({
@@ -270,23 +301,27 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             const segments = (await axios.get(`/druid/coordinator/v1/datasources/${d}?full`)).data
               .segments;
 
-            return segments.map((segment: any) => {
-              return {
-                segment_id: segment.identifier,
-                datasource: segment.dataSource,
-                start: segment.interval.split('/')[0],
-                end: segment.interval.split('/')[1],
-                version: segment.version,
-                partition_num: segment.shardSpec.partitionNum ? 0 : segment.shardSpec.partitionNum,
-                size: segment.size,
-                num_rows: -1,
-                num_replicas: -1,
-                is_available: -1,
-                is_published: -1,
-                is_realtime: -1,
-                is_overshadowed: -1,
-              };
-            });
+            return segments.map(
+              (segment: any): SegmentQueryResultRow => {
+                return {
+                  segment_id: segment.identifier,
+                  datasource: segment.dataSource,
+                  start: segment.interval.split('/')[0],
+                  end: segment.interval.split('/')[1],
+                  version: segment.version,
+                  time_span: '-',
+                  partitioning: '-',
+                  partition_num: deepGet(segment, 'shardSpec.partitionNum') || 0,
+                  size: segment.size,
+                  num_rows: -1,
+                  num_replicas: -1,
+                  is_available: -1,
+                  is_published: -1,
+                  is_realtime: -1,
+                  is_overshadowed: -1,
+                };
+              },
+            );
           }),
         );
 
@@ -387,6 +422,23 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
 
     const numRowsValues = segments.map(d => formatInteger(d.num_rows)).concat('(unknown)');
 
+    const renderFilterableCell = (field: string) => {
+      return (row: { value: any }) => {
+        const value = row.value;
+        return (
+          <a
+            onClick={() => {
+              this.setState({
+                segmentFilter: addFilter(segmentFilter, field, value),
+              });
+            }}
+          >
+            {value}
+          </a>
+        );
+      };
+    };
+
     return (
       <ReactTable
         data={segments}
@@ -421,18 +473,7 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             Header: 'Datasource',
             show: hiddenColumns.exists('Datasource'),
             accessor: 'datasource',
-            Cell: row => {
-              const value = row.value;
-              return (
-                <a
-                  onClick={() => {
-                    this.setState({ segmentFilter: addFilter(segmentFilter, 'datasource', value) });
-                  }}
-                >
-                  {value}
-                </a>
-              );
-            },
+            Cell: renderFilterableCell('datasource'),
           },
           {
             Header: 'Interval',
@@ -440,18 +481,7 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             accessor: 'interval',
             width: 120,
             defaultSortDesc: true,
-            Cell: row => {
-              const value = row.value;
-              return (
-                <a
-                  onClick={() => {
-                    this.setState({ segmentFilter: addFilter(segmentFilter, 'interval', value) });
-                  }}
-                >
-                  {value}
-                </a>
-              );
-            },
+            Cell: renderFilterableCell('interval'),
           },
           {
             Header: 'Start',
@@ -459,18 +489,7 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             accessor: 'start',
             width: 120,
             defaultSortDesc: true,
-            Cell: row => {
-              const value = row.value;
-              return (
-                <a
-                  onClick={() => {
-                    this.setState({ segmentFilter: addFilter(segmentFilter, 'start', value) });
-                  }}
-                >
-                  {value}
-                </a>
-              );
-            },
+            Cell: renderFilterableCell('start'),
           },
           {
             Header: 'End',
@@ -478,18 +497,7 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             accessor: 'end',
             defaultSortDesc: true,
             width: 120,
-            Cell: row => {
-              const value = row.value;
-              return (
-                <a
-                  onClick={() => {
-                    this.setState({ segmentFilter: addFilter(segmentFilter, 'end', value) });
-                  }}
-                >
-                  {value}
-                </a>
-              );
-            },
+            Cell: renderFilterableCell('end'),
           },
           {
             Header: 'Version',
@@ -499,6 +507,22 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             width: 120,
           },
           {
+            Header: 'Time span',
+            show: capabilities.hasSql() && hiddenColumns.exists('Time span'),
+            accessor: 'time_span',
+            width: 100,
+            filterable: true,
+            Cell: renderFilterableCell('time_span'),
+          },
+          {
+            Header: 'Partitioning',
+            show: capabilities.hasSql() && hiddenColumns.exists('Partitioning'),
+            accessor: 'partitioning',
+            width: 100,
+            filterable: true,
+            Cell: renderFilterableCell('partitioning'),
+          },
+          {
             Header: 'Partition',
             show: hiddenColumns.exists('Partition'),
             accessor: 'partition_num',
diff --git a/web-console/src/views/services-view/services-view.spec.tsx b/web-console/src/views/services-view/services-view.spec.tsx
index ec63686..6b083ce 100644
--- a/web-console/src/views/services-view/services-view.spec.tsx
+++ b/web-console/src/views/services-view/services-view.spec.tsx
@@ -26,12 +26,7 @@ import { ServicesView } from './services-view';
 describe('services view', () => {
   it('action services view', () => {
     const servicesView = shallow(
-      <ServicesView
-        middleManager={'test'}
-        goToQuery={() => {}}
-        goToTask={() => {}}
-        capabilities={Capabilities.FULL}
-      />,
+      <ServicesView goToQuery={() => {}} goToTask={() => {}} capabilities={Capabilities.FULL} />,
     );
     expect(servicesView).toMatchSnapshot();
   });
diff --git a/web-console/src/views/services-view/services-view.tsx b/web-console/src/views/services-view/services-view.tsx
index a5069a9..1f0280d 100644
--- a/web-console/src/views/services-view/services-view.tsx
+++ b/web-console/src/views/services-view/services-view.tsx
@@ -37,18 +37,20 @@ import {
 import { AsyncActionDialog } from '../../dialogs';
 import {
   addFilter,
+  Capabilities,
+  CapabilitiesMode,
+  deepGet,
   formatBytes,
   formatBytesCompact,
   LocalStorageKeys,
   lookupBy,
+  oneOf,
   queryDruidSql,
   QueryManager,
   QueryState,
 } from '../../utils';
 import { BasicAction } from '../../utils/basic-action';
-import { Capabilities, CapabilitiesMode } from '../../utils/capabilities';
 import { LocalStorageBackedArray } from '../../utils/local-storage-backed-array';
-import { deepGet } from '../../utils/object-change';
 
 import './services-view.scss';
 
@@ -92,7 +94,6 @@ function formatQueues(
 }
 
 export interface ServicesViewProps {
-  middleManager: string | undefined;
   goToQuery: (initSql: string) => void;
   goToTask: (taskId: string) => void;
   capabilities: Capabilities;
@@ -326,8 +327,7 @@ ORDER BY "rank" DESC, "service" DESC`;
             show: hiddenColumns.exists('Type'),
             accessor: 'service_type',
             width: 150,
-            Cell: row => {
-              const value = row.value;
+            Cell: ({ value }) => {
               return (
                 <a
                   onClick={() => {
@@ -348,8 +348,7 @@ ORDER BY "rank" DESC, "service" DESC`;
             accessor: row => {
               return row.tier ? row.tier : row.worker ? row.worker.category : null;
             },
-            Cell: row => {
-              const value = row.value;
+            Cell: ({ value }) => {
               return (
                 <a
                   onClick={() => {
@@ -428,7 +427,7 @@ ORDER BY "rank" DESC, "service" DESC`;
             width: 100,
             filterable: false,
             accessor: row => {
-              if (row.service_type === 'middle_manager' || row.service_type === 'indexer') {
+              if (oneOf(row.service_type, 'middle_manager', 'indexer')) {
                 return row.worker ? (row.currCapacityUsed || 0) / row.worker.capacity : null;
               } else {
                 return row.max_size ? row.curr_size / row.max_size : null;
@@ -488,7 +487,7 @@ ORDER BY "rank" DESC, "service" DESC`;
             width: 400,
             filterable: false,
             accessor: row => {
-              if (row.service_type === 'middle_manager' || row.service_type === 'indexer') {
+              if (oneOf(row.service_type, 'middle_manager', 'indexer')) {
                 if (deepGet(row, 'worker.version') === '') return 'Disabled';
 
                 const details: string[] = [];
@@ -551,10 +550,10 @@ ORDER BY "rank" DESC, "service" DESC`;
             width: ACTION_COLUMN_WIDTH,
             accessor: row => row.worker,
             filterable: false,
-            Cell: row => {
-              if (!row.value) return null;
-              const disabled = row.value.version === '';
-              const workerActions = this.getWorkerActions(row.value.host, disabled);
+            Cell: ({ value }) => {
+              if (!value) return null;
+              const disabled = value.version === '';
+              const workerActions = this.getWorkerActions(value.host, disabled);
               return <ActionCell actions={workerActions} />;
             },
           },
diff --git a/web-console/webpack.config.js b/web-console/webpack.config.js
index 533792a..2bf795e 100644
--- a/web-console/webpack.config.js
+++ b/web-console/webpack.config.js
@@ -61,7 +61,7 @@ module.exports = env => {
     },
     target: 'web',
     resolve: {
-      extensions: ['.tsx', '.ts', '.html', '.js', '.json', '.scss', '.css'],
+      extensions: ['.tsx', '.ts', '.js', '.scss', '.css'],
     },
     devServer: {
       publicPath: '/public',


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@druid.apache.org
For additional commands, e-mail: commits-help@druid.apache.org