Posted to commits@druid.apache.org by GitBox <gi...@apache.org> on 2018/08/30 16:56:31 UTC

[GitHub] gianm closed pull request #6266: Rename io.druid to org.apache.druid.

URL: https://github.com/apache/incubator-druid/pull/6266
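
For context, a package rename of this scale (io.druid to org.apache.druid across every module) is normally scripted rather than edited by hand. Below is a minimal, hypothetical sketch of how such a migration could be automated, assuming a Maven-style source layout; it is not the tooling actually used for this PR, and the file-extension list and helper names are illustrative assumptions.

```
# Hypothetical sketch only -- not the script used for this PR.
# Rewrites io.druid references in text files, then moves the
# io/druid source trees to org/apache/druid.
import os
import shutil

OLD_PKG = "io.druid"
NEW_PKG = "org.apache.druid"
OLD_DIR = OLD_PKG.replace(".", os.sep)        # io/druid
NEW_DIR = NEW_PKG.replace(".", os.sep)        # org/apache/druid
TEXT_EXTS = (".java", ".xml", ".md", ".yml")  # assumption: file types touched

def rewrite_references(root):
    """Replace the old package name in every matching text file under root."""
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if name.endswith(TEXT_EXTS):
                path = os.path.join(dirpath, name)
                with open(path, encoding="utf-8") as f:
                    text = f.read()
                if OLD_PKG in text:
                    with open(path, "w", encoding="utf-8") as f:
                        f.write(text.replace(OLD_PKG, NEW_PKG))

def move_source_trees(root):
    """Move each .../io/druid directory to the matching .../org/apache/druid."""
    for dirpath, dirnames, _ in os.walk(root):
        if dirpath.endswith(os.sep + OLD_DIR):
            target = dirpath[: -len(OLD_DIR)] + NEW_DIR
            os.makedirs(os.path.dirname(target), exist_ok=True)
            shutil.move(dirpath, target)  # leaves an empty io/ dir behind
            dirnames[:] = []  # do not descend into the tree we just moved

if __name__ == "__main__":
    rewrite_references(".")
    move_source_trees(".")
```

A real migration would also need to preserve git history (for example via git mv) and handle generated or binary files separately; the sketch above covers only the mechanical text rewrite.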

This is a PR merged from a forked repository. Because GitHub hides the original
diff once a pull request from a fork is merged, the diff is reproduced below for
the sake of provenance:

diff --git a/.idea/inspectionProfiles/Druid.xml b/.idea/inspectionProfiles/Druid.xml
index d0029e103fa..c735b2614c8 100644
--- a/.idea/inspectionProfiles/Druid.xml
+++ b/.idea/inspectionProfiles/Druid.xml
@@ -63,7 +63,7 @@
     <inspection_tool class="JavadocReference" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="JsonStandardCompliance" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="MalformedFormatString" enabled="true" level="ERROR" enabled_by_default="true">
-      <option name="additionalClasses" value="io.druid.java.util.common.StringUtils,io.druid.java.util.common.logger.Logger" />
+      <option name="additionalClasses" value="org.apache.druid.java.util.common.StringUtils,org.apache.druid.java.util.common.logger.Logger" />
       <option name="additionalMethods" value="trace,debug,info,warn,error,wtf,format,nonStrictFormat" />
     </inspection_tool>
     <inspection_tool class="MalformedRegex" enabled="true" level="ERROR" enabled_by_default="true" />
diff --git a/.idea/misc.xml b/.idea/misc.xml
index 846d8ad4f8c..41f0d62b854 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -8,18 +8,18 @@
       <item index="3" class="java.lang.String" itemvalue="com.google.inject.Inject" />
       <item index="4" class="java.lang.String" itemvalue="com.google.inject.Provides" />
       <item index="5" class="java.lang.String" itemvalue="io.airlift.airline.Command" />
-      <item index="6" class="java.lang.String" itemvalue="io.druid.annotations.UsedByJUnitParamsRunner" />
-      <item index="7" class="java.lang.String" itemvalue="io.druid.annotations.UsedInGeneratedCode" />
-      <item index="8" class="java.lang.String" itemvalue="io.druid.guice.annotations.ExtensionPoint" />
-      <item index="9" class="java.lang.String" itemvalue="io.druid.guice.annotations.PublicApi" />
-      <item index="10" class="java.lang.String" itemvalue="io.druid.java.util.common.lifecycle.LifecycleStart" />
-      <item index="11" class="java.lang.String" itemvalue="io.druid.java.util.common.lifecycle.LifecycleStop" />
+      <item index="6" class="java.lang.String" itemvalue="org.apache.druid.annotations.UsedByJUnitParamsRunner" />
+      <item index="7" class="java.lang.String" itemvalue="org.apache.druid.annotations.UsedInGeneratedCode" />
+      <item index="8" class="java.lang.String" itemvalue="org.apache.druid.guice.annotations.ExtensionPoint" />
+      <item index="9" class="java.lang.String" itemvalue="org.apache.druid.guice.annotations.PublicApi" />
+      <item index="10" class="java.lang.String" itemvalue="org.apache.druid.java.util.common.lifecycle.LifecycleStart" />
+      <item index="11" class="java.lang.String" itemvalue="org.apache.druid.java.util.common.lifecycle.LifecycleStop" />
       <item index="12" class="java.lang.String" itemvalue="javax.inject.Inject" />
       <item index="13" class="java.lang.String" itemvalue="org.openjdk.jmh.annotations.Benchmark" />
     </list>
-    <pattern value="io.druid.cli.GuiceRunnable" hierarchically="true" method="run" />
-    <pattern value="io.druid.cli.GuiceRunnable" hierarchically="true" />
-    <pattern value="io.druid.initialization.DruidModule" hierarchically="true" method="getJacksonModules" />
+    <pattern value="org.apache.druid.cli.GuiceRunnable" hierarchically="true" method="run" />
+    <pattern value="org.apache.druid.cli.GuiceRunnable" hierarchically="true" />
+    <pattern value="org.apache.druid.initialization.DruidModule" hierarchically="true" method="getJacksonModules" />
     <writeAnnotations>
       <writeAnnotation name="com.fasterxml.jackson.annotation.JacksonInject" />
       <writeAnnotation name="com.fasterxml.jackson.annotation.JsonProperty" />
diff --git a/.idea/scopes/NonGeneratedFiles.xml b/.idea/scopes/NonGeneratedFiles.xml
index 22375373e96..5bd4a87fc46 100644
--- a/.idea/scopes/NonGeneratedFiles.xml
+++ b/.idea/scopes/NonGeneratedFiles.xml
@@ -1,3 +1,3 @@
 <component name="DependencyValidationManager">
-  <scope name="NonGeneratedFiles" pattern="(src:*..*||test:*..*)&amp;&amp;!test[druid-protobuf-extensions]:io.druid.data.input.protobuf.ProtoTestEventWrapper" />
+  <scope name="NonGeneratedFiles" pattern="(src:*..*||test:*..*)&amp;&amp;!test[druid-protobuf-extensions]:org.apache.druid.data.input.protobuf.ProtoTestEventWrapper" />
 </component>
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 67afc2651e7..c0e093dab78 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,7 +2,7 @@ language: java
 
 # On 12-12-2017, Travis updated their trusty image, which caused integration tests to fail.
 # The group: config instructs Travis to use the previous trusty image.
-# Please see https://github.com/druid-io/druid/pull/5155 for more information.
+# Please see https://github.com/apache/incubator-druid/pull/5155 for more information.
 sudo: false
 dist: trusty
 group: deprecated-2017Q4
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6a0247b6de7..c6973a9b451 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -4,15 +4,15 @@ When submitting a pull request (PR), please use the following guidelines:
 
 - Make sure your code respects existing formatting conventions. In general, follow
   the same coding style as the code that you are modifying.
-- For Intellij you can import our code style settings xml: [druid_intellij_formatting.xml](https://github.com/druid-io/druid/raw/master/druid_intellij_formatting.xml).
-- For Eclipse you can import our code style settings xml: [eclipse_formatting.xml](https://github.com/druid-io/druid/raw/master/eclipse_formatting.xml).
+- For Intellij you can import our code style settings xml: [druid_intellij_formatting.xml](https://github.com/apache/incubator-druid/raw/master/druid_intellij_formatting.xml).
+- For Eclipse you can import our code style settings xml: [eclipse_formatting.xml](https://github.com/apache/incubator-druid/raw/master/eclipse_formatting.xml).
 - Do add/update documentation appropriately for the change you are making.
 - If you are introducing a new feature you may want to first write about your idea
   for feedback to [dev@druid.apache.org](https://lists.apache.org/list.html?dev@druid.apache.org).
   Non-trivial features should include unit tests covering the new functionality.
 - Bugfixes should include a unit test or integration test reproducing the issue.
 - Do not use author tags/information in the code.
-- Always include license header on each java file your create. See [this example](https://github.com/druid-io/druid/blob/master/common/src/main/java/io/druid/metadata/PasswordProvider.java)
+- Always include license header on each java file your create. See [this example](https://github.com/apache/incubator-druid/blob/master/common/src/main/java/org/apache/druid/metadata/PasswordProvider.java)
 - Try to keep pull requests short and submit separate ones for unrelated
   features, but feel free to combine simple bugfixes/tests into one pull request.
 - Keep the number of commits small and combine commits for related changes.
@@ -22,9 +22,9 @@ When submitting a pull request (PR), please use the following guidelines:
 
 ## GitHub Workflow
 
-1. Fork the druid-io/druid repository into your GitHub account
+1. Fork the apache/incubator-druid repository into your GitHub account
 
-    https://github.com/druid-io/druid/fork
+    https://github.com/apache/incubator-druid/fork
 
 1. Clone your fork of the GitHub repository
 
@@ -37,7 +37,7 @@ When submitting a pull request (PR), please use the following guidelines:
 1. Add a remote to keep up with upstream changes
 
     ```
-    git remote add upstream https://github.com/druid-io/druid.git
+    git remote add upstream https://github.com/apache/incubator-druid.git
     ```
 
     If you already have a copy, fetch upstream changes
@@ -84,7 +84,7 @@ When submitting a pull request (PR), please use the following guidelines:
     If you recently pushed your changes GitHub will automatically pop up a
     `Compare & pull request` button for any branches you recently pushed to. If you
     click that button it will automatically offer you to submit your pull-request
-    to the druid-io/druid repository.
+    to the apache/incubator-druid repository.
 
     - Give your pull-request a meaningful title.
     - In the description, explain your changes and the problem they are solving.
diff --git a/INTELLIJ_SETUP.md b/INTELLIJ_SETUP.md
index fac77fb5e1b..e1cbeb6e430 100644
--- a/INTELLIJ_SETUP.md
+++ b/INTELLIJ_SETUP.md
@@ -33,7 +33,7 @@ You can configure application definitions in XML for import into IntelliJ. Below
 <component name="ProjectRunConfigurationManager">
   <configuration default="false" name="Historical" type="Application" factoryName="Application">
     <extension name="coverage" enabled="false" merge="false" sample_coverage="true" runner="idea" />
-    <option name="MAIN_CLASS_NAME" value="io.druid.cli.Main" />
+    <option name="MAIN_CLASS_NAME" value="org.apache.druid.cli.Main" />
     <option name="VM_PARAMETERS" value="-server -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Xmx2G -XX:MaxJavaStackTraceDepth=9999 -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintAdaptiveSizePolicy -XX:+PrintReferenceGC -verbose:gc -XX:+PrintFlagsFinal -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dorg.jboss.logging.provider=slf4j -Dlog4j.configurationFile=$PROJECT_DIR$/common/src/main/resources/log4j2.debug.xml -Ddruid.host=localhost -Ddruid.service=historical -Ddruid.server.maxSize=10000000000 -Ddruid.processing.buffer.sizeBytes=100000000 -Ddruid.extensions.hadoopDependenciesDir=$PROJECT_DIR$/distribution/target/hadoop-dependencies/ -Ddruid.extensions.directory=$PROJECT_DIR$/distribution/target/extensions/ -Ddruid.extensions.loadList=[\&quot;druid-s3-extensions\&quot;,\&quot;druid-histogram\&quot;,\&quot;mysql-metadata-storage\&quot;] -Ddruid.historical.cache.useCache=false -Ddruid.historical.cache.populateCache=false -Ddruid.segmentCache.locations=&quot;[{\&quot;path\&quot;:\&quot;/tmp/druid/indexCache\&quot;,\&quot;maxSize\&quot;:10000000000}]&quot; -Ddruid.zk.service.host=localhost -Ddruid.processing.numThreads=1 -Ddruid.server.http.numThreads=50 -Ddruid.announcer.type=batch -Ddruid.emitter=logging" />
     <option name="PROGRAM_PARAMETERS" value="server historical" />
     <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$" />
@@ -54,7 +54,7 @@ You can configure application definitions in XML for import into IntelliJ. Below
 <component name="ProjectRunConfigurationManager">
   <configuration default="false" name="Coordinator" type="Application" factoryName="Application">
     <extension name="coverage" enabled="false" merge="false" sample_coverage="true" runner="idea" />
-    <option name="MAIN_CLASS_NAME" value="io.druid.cli.Main" />
+    <option name="MAIN_CLASS_NAME" value="org.apache.druid.cli.Main" />
     <option name="VM_PARAMETERS" value="-server -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Xmx256M -Xmx256M -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintAdaptiveSizePolicy -XX:+PrintReferenceGC -verbose:gc -XX:+PrintFlagsFinal -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dorg.jboss.logging.provider=slf4j -Ddruid.host=localhost -Ddruid.service=coordinator -Ddruid.extensions.directory=$PROJECT_DIR$/distribution/target/extensions/ -Ddruid.extensions.loadList=[\&quot;druid-s3-extensions\&quot;,\&quot;druid-histogram\&quot;,\&quot;mysql-metadata-storage\&quot;] -Ddruid.zk.service.host=localhost -Ddruid.metadata.storage.type=mysql -Ddruid.metadata.storage.connector.connectURI=&quot;jdbc:mysql://localhost:3306/druid&quot; -Ddruid.metadata.storage.connector.user=druid -Ddruid.metadata.storage.connector.password=diurd -Ddruid.announcer.type=batch -Ddruid.emitter=logging -Ddruid.coordinator.period=PT10S -Ddruid.coordinator.startDelay=PT5S" />
     <option name="PROGRAM_PARAMETERS" value="server coordinator" />
     <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$" />
diff --git a/README.md b/README.md
index c4b70df7d07..b4f728d5c67 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-[![Build Status](https://travis-ci.org/apache/incubator-druid.svg?branch=master)](https://travis-ci.org/apache/incubator-druid) [![Inspections Status](https://img.shields.io/teamcity/http/teamcity.jetbrains.com/s/OpenSourceProjects_Druid_Inspections.svg?label=TeamCity%20inspections)](https://teamcity.jetbrains.com/viewType.html?buildTypeId=OpenSourceProjects_Druid_Inspections) [![Coverage Status](https://coveralls.io/repos/druid-io/druid/badge.svg?branch=master)](https://coveralls.io/r/druid-io/druid?branch=master) [![IRC#druid-dev](https://img.shields.io/badge/IRC-druid--dev-blue.svg)](https://webchat.freenode.net?channels=druid-dev)
+[![Build Status](https://travis-ci.org/apache/incubator-druid.svg?branch=master)](https://travis-ci.org/apache/incubator-druid) [![Inspections Status](https://img.shields.io/teamcity/http/teamcity.jetbrains.com/s/OpenSourceProjects_Druid_Inspections.svg?label=TeamCity%20inspections)](https://teamcity.jetbrains.com/viewType.html?buildTypeId=OpenSourceProjects_Druid_Inspections) [![Coverage Status](https://coveralls.io/repos/apache/incubator-druid/badge.svg?branch=master)](https://coveralls.io/r/apache/incubator-druid?branch=master) [![IRC#druid-dev](https://img.shields.io/badge/IRC-druid--dev-blue.svg)](https://webchat.freenode.net?channels=druid-dev)
 
 ## Apache Druid (incubating)
 
@@ -28,7 +28,7 @@ You can get started with Druid with our [quickstart](http://druid.io/docs/latest
 
 ### Reporting Issues
 
-If you find any bugs, please file a [GitHub issue](https://github.com/druid-io/druid/issues).
+If you find any bugs, please file a [GitHub issue](https://github.com/apache/incubator-druid/issues).
 
 ### Community
 
diff --git a/api/pom.xml b/api/pom.xml
index 0ae62419341..6f98f4a3267 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -26,14 +26,14 @@
     <description>Druid Extensions API</description>
 
     <parent>
-        <groupId>io.druid</groupId>
+        <groupId>org.apache.druid</groupId>
         <artifactId>druid</artifactId>
         <version>0.13.0-SNAPSHOT</version>
     </parent>
 
     <dependencies>
         <dependency>
-            <groupId>io.druid</groupId>
+            <groupId>org.apache.druid</groupId>
             <artifactId>java-util</artifactId>
             <version>${project.parent.version}</version>
                 <exclusions>
diff --git a/api/src/main/java/io/druid/cli/CliCommandCreator.java b/api/src/main/java/io/druid/cli/CliCommandCreator.java
deleted file mode 100644
index c4e8f362033..00000000000
--- a/api/src/main/java/io/druid/cli/CliCommandCreator.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.cli;
-
-import io.airlift.airline.Cli;
-import io.druid.guice.annotations.ExtensionPoint;
-
-/**
- */
-@ExtensionPoint
-public interface CliCommandCreator
-{
-  void addCommands(Cli.CliBuilder builder);
-}
diff --git a/api/src/main/java/io/druid/data/input/ByteBufferInputRowParser.java b/api/src/main/java/io/druid/data/input/ByteBufferInputRowParser.java
deleted file mode 100644
index 7402072932f..00000000000
--- a/api/src/main/java/io/druid/data/input/ByteBufferInputRowParser.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.data.input.impl.InputRowParser;
-import io.druid.data.input.impl.ParseSpec;
-
-import java.nio.ByteBuffer;
-
-public interface ByteBufferInputRowParser extends InputRowParser<ByteBuffer>
-{
-  @Override
-  ByteBufferInputRowParser withParseSpec(ParseSpec parseSpec);
-}
diff --git a/api/src/main/java/io/druid/data/input/Committer.java b/api/src/main/java/io/druid/data/input/Committer.java
deleted file mode 100644
index 64b9833a730..00000000000
--- a/api/src/main/java/io/druid/data/input/Committer.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-/**
- * Committer includes a Runnable and a Jackson-serialized metadata object containing the offset
- */
-@ExtensionPoint
-public interface Committer extends Runnable
-{
-  /**
-   * @return A json serialized representation of commit metadata,
-   * which needs to be serialized and deserialized by Jackson.
-   * Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types
-   */
-  Object getMetadata();
-}
diff --git a/api/src/main/java/io/druid/data/input/FiniteFirehoseFactory.java b/api/src/main/java/io/druid/data/input/FiniteFirehoseFactory.java
deleted file mode 100644
index bb62161e671..00000000000
--- a/api/src/main/java/io/druid/data/input/FiniteFirehoseFactory.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import io.druid.data.input.impl.InputRowParser;
-
-import java.io.IOException;
-import java.util.stream.Stream;
-
-/**
- * {@link FiniteFirehoseFactory} designed for batch processing. Its implementations assume that the amount of inputs is
- * limited.
- *
- * @param <T> parser type
- * @param <S> input split type
- */
-public interface FiniteFirehoseFactory<T extends InputRowParser, S> extends FirehoseFactory<T>
-{
-  /**
-   * Returns true if this {@link FiniteFirehoseFactory} supports parallel batch indexing.
-   */
-  @JsonIgnore
-  @Override
-  default boolean isSplittable()
-  {
-    return true;
-  }
-
-  /**
-   * Returns a {@link Stream} for {@link InputSplit}s. In parallel batch indexing, each {@link InputSplit} is processed
-   * by a sub task.
-   *
-   * Listing splits may cause high overhead in some implementations. In this case, {@link InputSplit}s should be listed
-   * lazily so that the listing overhead could be amortized.
-   */
-  @JsonIgnore
-  Stream<InputSplit<S>> getSplits() throws IOException;
-
-  /**
-   * Returns number of splits returned by {@link #getSplits()}.
-   */
-  @JsonIgnore
-  int getNumSplits() throws IOException;
-
-  /**
-   * Returns the same {@link FiniteFirehoseFactory} but with the given {@link InputSplit}. The returned
-   * {@link FiniteFirehoseFactory} is used by sub tasks in parallel batch indexing.
-   */
-  FiniteFirehoseFactory<T, S> withSplit(InputSplit<S> split);
-}
diff --git a/api/src/main/java/io/druid/data/input/Firehose.java b/api/src/main/java/io/druid/data/input/Firehose.java
deleted file mode 100644
index da9ce15b524..00000000000
--- a/api/src/main/java/io/druid/data/input/Firehose.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import javax.annotation.Nullable;
-import java.io.Closeable;
-
-/**
- * This is an interface that holds onto the stream of incoming data.  Realtime data ingestion is built around this
- * abstraction.  In order to add a new type of source for realtime data ingestion, all you need to do is implement
- * one of these and register it with the Main.
- *
- * This object acts a lot like an Iterator, but it doesn't extend the Iterator interface because it extends
- * Closeable and it is very important that the close() method doesn't get forgotten, which is easy to do if this
- * gets passed around as an Iterator.
- * <p>
- * The implementation of this interface only needs to be minimally thread-safe. The three methods ##hasMore(),
- * ##nextRow() and ##commit() are all called from the same thread.  ##commit(), however, returns a callback
- * which will be called on another thread, so the operations inside of that callback must be thread-safe.
- * </p>
- */
-@ExtensionPoint
-public interface Firehose extends Closeable
-{
-  /**
-   * Returns whether there are more rows to process.  This is used to indicate that another item is immediately
-   * available via ##nextRow().  Thus, if the stream is still available but there are no new messages on it, this call
-   * should block until a new message is available.
-   *
-   * If something happens such that the stream is no longer available, this should return false.
-   *
-   * @return true if and when there is another row available, false if the stream has dried up
-   */
-  boolean hasMore();
-
-  /**
-   * The next row available.  Should only be called if hasMore returns true.
-   * The return value can be null which means the caller must skip this row.
-   *
-   * @return The next row
-   */
-  @Nullable
-  InputRow nextRow();
-
-  /**
-   * Returns a runnable that will "commit" everything read up to the point at which commit() is called.  This is
-   * often equivalent to everything that has been read since the last commit() call (or instantiation of the object),
-   * but doesn't necessarily have to be.
-   *
-   * This method is called when the main processing loop starts to persist its current batch of things to process.
-   * The returned runnable will be run when the current batch has been successfully persisted, there is usually
-   * some time lag between when this method is called and when the runnable is run.  The Runnable is also run on
-   * a separate thread so its operation should be thread-safe.
-   *
-   * The Runnable is essentially just a lambda/closure that is run() after data supplied by this instance has
-   * been committed on the writer side of this interface protocol.
-   * <p>
-   * A simple implementation of this interface might do nothing when run() is called 
-   * (in which case the same do-nothing instance can be returned every time), or 
-   * a more complex implementation might clean up temporary resources that are no longer needed 
-   * because of InputRows delivered by prior calls to ##nextRow().
-   * </p>
-   */
-  Runnable commit();
-}
diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactory.java b/api/src/main/java/io/druid/data/input/FirehoseFactory.java
deleted file mode 100644
index d5c45483fc4..00000000000
--- a/api/src/main/java/io/druid/data/input/FirehoseFactory.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.data.input.impl.InputRowParser;
-import io.druid.data.input.impl.prefetch.PrefetchableTextFilesFirehoseFactory;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.parsers.ParseException;
-
-import javax.annotation.Nullable;
-import java.io.File;
-import java.io.IOException;
-
-/**
- * FirehoseFactory creates a {@link Firehose} which is an interface holding onto the stream of incoming data.
- * It currently provides two methods for creating a {@link Firehose} and their default implementations call each other
- * for the backward compatibility.  Implementations of this interface must implement one of these methods.
- */
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-public interface FirehoseFactory<T extends InputRowParser>
-{
-  /**
-   * Initialization method that connects up the fire hose.  If this method returns successfully it should be safe to
-   * call hasMore() on the returned Firehose (which might subsequently block).
-   * <p/>
-   * If this method returns null, then any attempt to call hasMore(), nextRow(), commit() and close() on the return
-   * value will throw a surprising NPE.   Throwing IOException on connection failure or runtime exception on
-   * invalid configuration is preferred over returning null.
-   *
-   * @param parser             an input row parser
-   */
-  @Deprecated
-  default Firehose connect(T parser) throws IOException, ParseException
-  {
-    return connect(parser, null);
-  }
-
-  /**
-   * Initialization method that connects up the fire hose.  If this method returns successfully it should be safe to
-   * call hasMore() on the returned Firehose (which might subsequently block).
-   * <p/>
-   * If this method returns null, then any attempt to call hasMore(), nextRow(), commit() and close() on the return
-   * value will throw a surprising NPE.   Throwing IOException on connection failure or runtime exception on
-   * invalid configuration is preferred over returning null.
-   * <p/>
-   * Some fire hoses like {@link PrefetchableTextFilesFirehoseFactory} may use a temporary
-   * directory to cache data in it.
-   *
-   * @param parser             an input row parser
-   * @param temporaryDirectory a directory where temporary files are stored
-   */
-  default Firehose connect(T parser, @Nullable File temporaryDirectory) throws IOException, ParseException
-  {
-    return connect(parser);
-  }
-
-  default boolean isSplittable()
-  {
-    return false;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java b/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java
deleted file mode 100644
index 86c67b04ab9..00000000000
--- a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.data.input.impl.InputRowParser;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.parsers.ParseException;
-
-import java.io.IOException;
-
-/**
- * Initialization method that connects up the FirehoseV2.  If this method returns successfully it should be safe to
- * call start() on the returned FirehoseV2 (which might subsequently block).
- *
- * In contrast to V1 version, FirehoseFactoryV2 is able to pass an additional json-serialized object to FirehoseV2,
- * which contains last commit metadata
- *
- * <p/>
- * If this method returns null, then any attempt to call start(), advance(), currRow(), makeCommitter() and close() on the return
- * value will throw a surprising NPE.   Throwing IOException on connection failure or runtime exception on
- * invalid configuration is preferred over returning null.
- */
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-public interface FirehoseFactoryV2<T extends InputRowParser>
-{
-  /**
-   * This method is declared to throw {@link IOException}, although it's not thrown in the implementations in Druid
-   * code, for compatibility with third-party extensions.
-   */
-  @SuppressWarnings("RedundantThrows")
-  FirehoseV2 connect(T parser, Object lastCommit) throws IOException, ParseException;
-}
diff --git a/api/src/main/java/io/druid/data/input/FirehoseV2.java b/api/src/main/java/io/druid/data/input/FirehoseV2.java
deleted file mode 100644
index c8acfa1d4d4..00000000000
--- a/api/src/main/java/io/druid/data/input/FirehoseV2.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.Closeable;
-
-/**
- * This is an interface that holds onto the stream of incoming data.  Realtime data ingestion is built around this
- * abstraction.  In order to add a new type of source for realtime data ingestion, all you need to do is implement
- * one of these and register it with the Main.
- *
- * In contrast to Firehose v1 version, FirehoseV2 will always operate in a "peek, then advance" manner.
- * And the intended usage patttern is
- * 1. Call start()
- * 2. Read currRow()
- * 3. Call advance()
- * 4. If index should be committed: commit()
- * 5. GOTO 2
- *
- * Note that commit() is being called *after* advance.
- *
- * This object acts a lot like an Iterator, but it doesn't extend the Iterator interface because it extends
- * Closeable and it is very important that the close() method doesn't get forgotten, which is easy to do if this
- * gets passed around as an Iterator.
- *
- * The implementation of this interface only needs to be minimally thread-safe. The methods ##start(), ##advance(),
- * ##currRow() and ##makeCommitter() are all called from the same thread.  ##makeCommitter(), however, returns a callback
- * which will be called on another thread, so the operations inside of that callback must be thread-safe.
- */
-@ExtensionPoint
-public interface FirehoseV2 extends Closeable
-{
-  /**
-   * For initial start
-   */
-  void start();
-
-  /**
-   * Advance the firehose to the next offset.  Implementations of this interface should make sure that
-   * if advance() is called and throws out an exception, the next call to currRow() should return a
-   * null value.
-   *
-   * @return true if and when there is another row available, false if the stream has dried up
-   */
-  boolean advance();
-
-  /**
-   * @return The current row
-   */
-  InputRow currRow();
-
-  /**
-   * Returns a Committer that will "commit" everything read up to the point at which makeCommitter() is called.
-   *
-   * This method is called when the main processing loop starts to persist its current batch of things to process.
-   * The returned committer will be run when the current batch has been successfully persisted
-   * and the metadata the committer carries can also be persisted along with segment data. There is usually
-   * some time lag between when this method is called and when the runnable is run.  The Runnable is also run on
-   * a separate thread so its operation should be thread-safe.
-   *
-   * Note that "correct" usage of this interface will always call advance() before commit() if the current row
-   * is considered in the commit.
-   *
-   * The Runnable is essentially just a lambda/closure that is run() after data supplied by this instance has
-   * been committed on the writer side of this interface protocol.
-   *
-   * A simple implementation of this interface might do nothing when run() is called,
-   * and save proper commit information in metadata
-   */
-  Committer makeCommitter();
-}
diff --git a/api/src/main/java/io/druid/data/input/InputRow.java b/api/src/main/java/io/druid/data/input/InputRow.java
deleted file mode 100644
index 0de7c199fef..00000000000
--- a/api/src/main/java/io/druid/data/input/InputRow.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.util.List;
-
-/**
- * An InputRow is the interface definition of an event being input into the data ingestion layer.
- *
- * An InputRow is a Row with a self-describing list of the dimensions available.  This list is used to
- * implement "schema-less" data ingestion that allows the system to add new dimensions as they appear.
- *
- */
-@ExtensionPoint
-public interface InputRow extends Row
-{
-  /**
-   * Returns the dimensions that exist in this row.
-   *
-   * @return the dimensions that exist in this row.
-   */
-  List<String> getDimensions();
-}
diff --git a/api/src/main/java/io/druid/data/input/InputSplit.java b/api/src/main/java/io/druid/data/input/InputSplit.java
deleted file mode 100644
index b3886b63091..00000000000
--- a/api/src/main/java/io/druid/data/input/InputSplit.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * Input unit for distributed batch ingestion. Used in {@link FiniteFirehoseFactory}.
- * An {@link InputSplit} represents the input data processed by a {@code io.druid.indexing.common.task.Task}.
- */
-public class InputSplit<T>
-{
-  private final T split;
-
-  @JsonCreator
-  public InputSplit(@JsonProperty("split") T split)
-  {
-    this.split = split;
-  }
-
-  @JsonProperty("split")
-  public T get()
-  {
-    return split;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "InputSplit{" +
-           "split=" + split +
-           "}";
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/MapBasedInputRow.java b/api/src/main/java/io/druid/data/input/MapBasedInputRow.java
deleted file mode 100644
index d373e2439f7..00000000000
--- a/api/src/main/java/io/druid/data/input/MapBasedInputRow.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.DateTimes;
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-@PublicApi
-public class MapBasedInputRow extends MapBasedRow implements InputRow
-{
-  private final List<String> dimensions;
-
-  public MapBasedInputRow(
-      long timestamp,
-      List<String> dimensions,
-      Map<String, Object> event
-  )
-  {
-    super(timestamp, event);
-    this.dimensions = dimensions;
-  }
-
-  public MapBasedInputRow(
-      DateTime timestamp,
-      List<String> dimensions,
-      Map<String, Object> event
-  )
-  {
-    super(timestamp, event);
-    this.dimensions = dimensions;
-  }
-
-  @Override
-  public List<String> getDimensions()
-  {
-    return dimensions;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "MapBasedInputRow{" +
-           "timestamp=" + DateTimes.utc(getTimestampFromEpoch()) +
-           ", event=" + getEvent() +
-           ", dimensions=" + dimensions +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/MapBasedRow.java b/api/src/main/java/io/druid/data/input/MapBasedRow.java
deleted file mode 100644
index 4fae7399776..00000000000
--- a/api/src/main/java/io/druid/data/input/MapBasedRow.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.DateTimes;
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-@PublicApi
-public class MapBasedRow implements Row
-{
-  private final DateTime timestamp;
-  private final Map<String, Object> event;
-
-  @JsonCreator
-  public MapBasedRow(
-      @JsonProperty("timestamp") DateTime timestamp,
-      @JsonProperty("event") Map<String, Object> event
-  )
-  {
-    this.timestamp = timestamp;
-    this.event = event;
-  }
-
-  public MapBasedRow(
-      long timestamp,
-      Map<String, Object> event
-  )
-  {
-    this(DateTimes.utc(timestamp), event);
-  }
-
-  @Override
-  public long getTimestampFromEpoch()
-  {
-    return timestamp.getMillis();
-  }
-
-  @Override
-  @JsonProperty
-  public DateTime getTimestamp()
-  {
-    return timestamp;
-  }
-
-  @JsonProperty
-  public Map<String, Object> getEvent()
-  {
-    return event;
-  }
-
-  @Override
-  public List<String> getDimension(String dimension)
-  {
-    return Rows.objectToStrings(event.get(dimension));
-  }
-
-  @Override
-  public Object getRaw(String dimension)
-  {
-    return event.get(dimension);
-  }
-
-  @Override
-  public Number getMetric(String metric)
-  {
-    return Rows.objectToNumber(metric, event.get(metric));
-  }
-
-  @Override
-  public String toString()
-  {
-    return "MapBasedRow{" +
-           "timestamp=" + timestamp +
-           ", event=" + event +
-           '}';
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    MapBasedRow that = (MapBasedRow) o;
-
-    if (!event.equals(that.event)) {
-      return false;
-    }
-    if (!timestamp.equals(that.timestamp)) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = timestamp.hashCode();
-    result = 31 * result + event.hashCode();
-    return result;
-  }
-
-  @Override
-  public int compareTo(Row o)
-  {
-    return timestamp.compareTo(o.getTimestamp());
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/Row.java b/api/src/main/java/io/druid/data/input/Row.java
deleted file mode 100644
index 9657c61fcd7..00000000000
--- a/api/src/main/java/io/druid/data/input/Row.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.guice.annotations.PublicApi;
-import org.joda.time.DateTime;
-
-import java.util.List;
-
-/**
- * A Row of data.  This can be used for both input and output into various parts of the system.  It assumes
- * that the user already knows the schema of the row and can query for the parts that they care about.
- */
-@PublicApi
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "version", defaultImpl = MapBasedRow.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = "v1", value = MapBasedRow.class)
-})
-public interface Row extends Comparable<Row>
-{
-  /**
-   * Returns the timestamp from the epoch in milliseconds.  If the event happened _right now_, this would return the
-   * same thing as System.currentTimeMillis();
-   *
-   * @return the timestamp from the epoch in milliseconds.
-   */
-  long getTimestampFromEpoch();
-
-  /**
-   * Returns the timestamp from the epoch as an org.joda.time.DateTime.  If the event happened _right now_, this would return the
-   * same thing as new DateTime();
-   *
-   * @return the timestamp from the epoch as an org.joda.time.DateTime object.
-   */
-  DateTime getTimestamp();
-
-  /**
-   * Returns the list of dimension values for the given column name.
-   * <p/>
-   *
-   * @param dimension the column name of the dimension requested
-   *
-   * @return the list of values for the provided column name
-   */
-  List<String> getDimension(String dimension);
-
-  /**
-   * Returns the raw dimension value for the given column name. This is different from {@link #getDimension} which
-   * converts all values to strings before returning them.
-   *
-   * @param dimension the column name of the dimension requested
-   *
-   * @return the value of the provided column name
-   */
-  Object getRaw(String dimension);
-
-  /**
-   * Returns the metric column value for the given column name. This method is different from {@link #getRaw} in two
-   * aspects:
-   *  1. If the column is absent in the row, numeric zero is returned, rather than null.
-   *  2. If the column has string value, an attempt is made to parse this value as a number.
-   */
-  Number getMetric(String metric);
-}
diff --git a/api/src/main/java/io/druid/data/input/Rows.java b/api/src/main/java/io/druid/data/input/Rows.java
deleted file mode 100644
index b511e7dfcab..00000000000
--- a/api/src/main/java/io/druid/data/input/Rows.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSortedSet;
-import com.google.common.collect.Maps;
-import com.google.common.primitives.Longs;
-import io.druid.common.config.NullHandling;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.parsers.ParseException;
-
-import javax.annotation.Nullable;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- */
-public class Rows
-{
-  public static final Long LONG_ZERO = 0L;
-
-  /**
-   * @param timeStamp rollup up timestamp to be used to create group key
-   * @param inputRow  input row
-   *
-   * @return groupKey for the given input row
-   */
-  public static List<Object> toGroupKey(long timeStamp, InputRow inputRow)
-  {
-    final Map<String, Set<String>> dims = Maps.newTreeMap();
-    for (final String dim : inputRow.getDimensions()) {
-      final Set<String> dimValues = ImmutableSortedSet.copyOf(inputRow.getDimension(dim));
-      if (dimValues.size() > 0) {
-        dims.put(dim, dimValues);
-      }
-    }
-    return ImmutableList.of(
-        timeStamp,
-        dims
-    );
-  }
-
-  /**
-   * Convert an object to a list of strings.
-   */
-  public static List<String> objectToStrings(final Object inputValue)
-  {
-    if (inputValue == null) {
-      return Collections.emptyList();
-    } else if (inputValue instanceof List) {
-      // guava's toString function fails on null objects, so please do not use it
-      final List<Object> values = (List) inputValue;
-
-      final List<String> retVal = new ArrayList<>(values.size());
-      for (Object val : values) {
-        retVal.add(String.valueOf(val));
-      }
-
-      return retVal;
-    } else {
-      return Collections.singletonList(String.valueOf(inputValue));
-    }
-  }
-
-  /**
-   * Convert an object to a number. Nulls are treated as zeroes.
-   *
-   * @param name       field name of the object being converted (may be used for exception messages)
-   * @param inputValue the actual object being converted
-   *
-   * @return a number
-   *
-   * @throws NullPointerException if the string is null
-   * @throws ParseException       if the column cannot be converted to a number
-   */
-  @Nullable
-  public static Number objectToNumber(final String name, final Object inputValue)
-  {
-    if (inputValue == null) {
-      return NullHandling.defaultLongValue();
-    }
-
-    if (inputValue instanceof Number) {
-      return (Number) inputValue;
-    } else if (inputValue instanceof String) {
-      try {
-        String metricValueString = StringUtils.removeChar(((String) inputValue).trim(), ',');
-        // Longs.tryParse() doesn't support leading '+', so we need to trim it ourselves
-        metricValueString = trimLeadingPlusOfLongString(metricValueString);
-        Long v = Longs.tryParse(metricValueString);
-        // Do NOT use ternary operator here, because it makes Java to convert Long to Double
-        if (v != null) {
-          return v;
-        } else {
-          return Double.valueOf(metricValueString);
-        }
-      }
-      catch (Exception e) {
-        throw new ParseException(e, "Unable to parse value[%s] for field[%s]", inputValue, name);
-      }
-    } else {
-      throw new ParseException("Unknown type[%s] for field", inputValue.getClass(), inputValue);
-    }
-  }
-
-  private static String trimLeadingPlusOfLongString(String metricValueString)
-  {
-    if (metricValueString.length() > 1 && metricValueString.charAt(0) == '+') {
-      char secondChar = metricValueString.charAt(1);
-      if (secondChar >= '0' && secondChar <= '9') {
-        metricValueString = metricValueString.substring(1);
-      }
-    }
-    return metricValueString;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java b/api/src/main/java/io/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java
deleted file mode 100644
index 6c1ce3442d2..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableList;
-import io.druid.data.input.FiniteFirehoseFactory;
-import io.druid.data.input.Firehose;
-import io.druid.data.input.InputSplit;
-import io.druid.java.util.common.logger.Logger;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.io.LineIterator;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.stream.Stream;
-
-/**
- * This is an abstract class for firehose factory for making firehoses reading text files.
- * It provides an unified {@link #connect(StringInputRowParser, File)} implementation for its subclasses.
- *
- * @param <T> object type representing input data
- */
-public abstract class AbstractTextFilesFirehoseFactory<T>
-    implements FiniteFirehoseFactory<StringInputRowParser, T>
-{
-  private static final Logger LOG = new Logger(AbstractTextFilesFirehoseFactory.class);
-
-  private List<T> objects;
-
-  @Override
-  public Firehose connect(StringInputRowParser firehoseParser, File temporaryDirectory) throws IOException
-  {
-    initializeObjectsIfNeeded();
-    final Iterator<T> iterator = objects.iterator();
-    return new FileIteratingFirehose(
-        new Iterator<LineIterator>()
-        {
-          @Override
-          public boolean hasNext()
-          {
-            return iterator.hasNext();
-          }
-
-          @Override
-          public LineIterator next()
-          {
-            if (!hasNext()) {
-              throw new NoSuchElementException();
-            }
-            final T object = iterator.next();
-            try {
-              return IOUtils.lineIterator(wrapObjectStream(object, openObjectStream(object)), StandardCharsets.UTF_8);
-            }
-            catch (Exception e) {
-              LOG.error(
-                  e,
-                  "Exception reading object[%s]",
-                  object
-              );
-              throw Throwables.propagate(e);
-            }
-          }
-        },
-        firehoseParser
-    );
-  }
-
-  protected void initializeObjectsIfNeeded() throws IOException
-  {
-    if (objects == null) {
-      objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "initObjects"));
-    }
-  }
-
-  public List<T> getObjects()
-  {
-    return objects;
-  }
-
-  @Override
-  public Stream<InputSplit<T>> getSplits() throws IOException
-  {
-    initializeObjectsIfNeeded();
-    return getObjects().stream().map(InputSplit::new);
-  }
-
-  @Override
-  public int getNumSplits() throws IOException
-  {
-    initializeObjectsIfNeeded();
-    return getObjects().size();
-  }
-
-  /**
-   * Initialize objects to be read by this firehose.  Since firehose factories are constructed whenever
-   * io.druid.indexing.common.task.Task objects are deserialized, actual initialization of objects is deferred
-   * until {@link #connect(StringInputRowParser, File)} is called.
-   *
-   * @return a collection of initialized objects.
-   */
-  protected abstract Collection<T> initObjects() throws IOException;
-
-  /**
- * Open an input stream from the given object.  If the object is compressed, this method should return the
- * compressed byte stream as-is; decompression should be handled in {@link #wrapObjectStream(Object, InputStream)}.
-   *
-   * @param object an object to be read
-   *
-   * @return an input stream for the object
-   */
-  protected abstract InputStream openObjectStream(T object) throws IOException;
-
-  /**
-   * Wrap the given input stream if needed.  The decompression logic should be applied to the given stream if the object
-   * is compressed.
-   *
-   * @param object an input object
-   * @param stream a stream for the object
- * @return a wrapped input stream
-   */
-  protected abstract InputStream wrapObjectStream(T object, InputStream stream) throws IOException;
-}
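
For context, a concrete subclass of the factory above only needs to supply the three abstract methods (plus withSplit from the FiniteFirehoseFactory interface, which is not shown in this diff). The sketch below is hypothetical, not the real LocalFirehoseFactory, and assumes plain local files with optional gzip compression:

    import com.google.common.collect.ImmutableList;
    import io.druid.data.input.FiniteFirehoseFactory;
    import io.druid.data.input.InputSplit;
    import io.druid.data.input.impl.AbstractTextFilesFirehoseFactory;
    import io.druid.data.input.impl.StringInputRowParser;

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Collection;
    import java.util.List;
    import java.util.zip.GZIPInputStream;

    // Hypothetical subclass: reads local text files, transparently un-gzipping *.gz.
    public class LocalTextFilesFirehoseFactory extends AbstractTextFilesFirehoseFactory<File>
    {
      private final List<File> files;

      public LocalTextFilesFirehoseFactory(List<File> files)
      {
        this.files = ImmutableList.copyOf(files);
      }

      @Override
      protected Collection<File> initObjects()
      {
        return files;  // deferred until connect()/getSplits(), per the javadoc above
      }

      @Override
      protected InputStream openObjectStream(File file) throws IOException
      {
        return new FileInputStream(file);  // raw (possibly compressed) bytes
      }

      @Override
      protected InputStream wrapObjectStream(File file, InputStream stream) throws IOException
      {
        // decompression belongs here, per the openObjectStream() contract
        return file.getName().endsWith(".gz") ? new GZIPInputStream(stream) : stream;
      }

      @Override
      public FiniteFirehoseFactory<StringInputRowParser, File> withSplit(InputSplit<File> split)
      {
        // InputSplit.get() is assumed from the (unshown) InputSplit class
        return new LocalTextFilesFirehoseFactory(ImmutableList.of(split.get()));
      }
    }
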
diff --git a/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java b/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java
deleted file mode 100644
index 576f0b94bdf..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import io.druid.java.util.common.parsers.CSVParser;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-
-/**
- */
-public class CSVParseSpec extends ParseSpec
-{
-  private final String listDelimiter;
-  private final List<String> columns;
-  private final boolean hasHeaderRow;
-  private final int skipHeaderRows;
-
-  @JsonCreator
-  public CSVParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("listDelimiter") String listDelimiter,
-      @JsonProperty("columns") List<String> columns,
-      @JsonProperty("hasHeaderRow") boolean hasHeaderRow,
-      @JsonProperty("skipHeaderRows") int skipHeaderRows
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-
-    this.listDelimiter = listDelimiter;
-    this.columns = columns;
-    this.hasHeaderRow = hasHeaderRow;
-    this.skipHeaderRows = skipHeaderRows;
-
-    if (columns != null) {
-      for (String column : columns) {
-        Preconditions.checkArgument(!column.contains(","), "Column[%s] cannot contain a comma", column);
-      }
-      verify(dimensionsSpec.getDimensionNames());
-    } else {
-      Preconditions.checkArgument(
-          hasHeaderRow,
-          "If columns field is not set, the first row of your data must have your header"
-          + " and hasHeaderRow must be set to true."
-      );
-    }
-  }
-
-  @Deprecated
-  public CSVParseSpec(
-      TimestampSpec timestampSpec,
-      DimensionsSpec dimensionsSpec,
-      String listDelimiter,
-      List<String> columns
-  )
-  {
-    this(timestampSpec, dimensionsSpec, listDelimiter, columns, false, 0);
-  }
-
-  @JsonProperty
-  public String getListDelimiter()
-  {
-    return listDelimiter;
-  }
-
-  @JsonProperty("columns")
-  public List<String> getColumns()
-  {
-    return columns;
-  }
-
-  @JsonProperty
-  public boolean isHasHeaderRow()
-  {
-    return hasHeaderRow;
-  }
-
-  @JsonProperty("skipHeaderRows")
-  public int getSkipHeaderRows()
-  {
-    return skipHeaderRows;
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-    for (String columnName : usedCols) {
-      Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
-    }
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new CSVParser(listDelimiter, columns, hasHeaderRow, skipHeaderRows);
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new CSVParseSpec(spec, getDimensionsSpec(), listDelimiter, columns, hasHeaderRow, skipHeaderRows);
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new CSVParseSpec(getTimestampSpec(), spec, listDelimiter, columns, hasHeaderRow, skipHeaderRows);
-  }
-
-}
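
As the constructor above enforces, either columns must be listed explicitly or hasHeaderRow must be true, and every declared dimension must appear in columns. A minimal construction sketch; the TimestampSpec(column, format, missingValue) signature is assumed from the rest of the codebase:

    import com.google.common.collect.ImmutableList;
    import io.druid.data.input.impl.CSVParseSpec;
    import io.druid.data.input.impl.DimensionsSpec;
    import io.druid.data.input.impl.TimestampSpec;

    public class CsvSpecDemo
    {
      static CSVParseSpec csvSpec()
      {
        return new CSVParseSpec(
            new TimestampSpec("timestamp", "auto", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("page", "user"))),
            null,                                           // listDelimiter: null selects the default
            ImmutableList.of("timestamp", "page", "user"),  // explicit columns...
            false,                                          // ...so hasHeaderRow may stay false
            0                                               // skipHeaderRows
        );
      }
    }
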
diff --git a/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java b/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java
deleted file mode 100644
index 5ccd53a9a9e..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import io.druid.java.util.common.parsers.DelimitedParser;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-
-/**
- */
-public class DelimitedParseSpec extends ParseSpec
-{
-  private final String delimiter;
-  private final String listDelimiter;
-  private final List<String> columns;
-  private final boolean hasHeaderRow;
-  private final int skipHeaderRows;
-
-  @JsonCreator
-  public DelimitedParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("delimiter") String delimiter,
-      @JsonProperty("listDelimiter") String listDelimiter,
-      @JsonProperty("columns") List<String> columns,
-      @JsonProperty("hasHeaderRow") boolean hasHeaderRow,
-      @JsonProperty("skipHeaderRows") int skipHeaderRows
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-
-    this.delimiter = delimiter;
-    this.listDelimiter = listDelimiter;
-    this.columns = columns;
-    this.hasHeaderRow = hasHeaderRow;
-    this.skipHeaderRows = skipHeaderRows;
-
-    if (columns != null) {
-      for (String column : this.columns) {
-        Preconditions.checkArgument(!column.contains(","), "Column[%s] cannot contain a comma", column);
-      }
-      verify(dimensionsSpec.getDimensionNames());
-    } else {
-      Preconditions.checkArgument(
-          hasHeaderRow,
-          "If columns field is not set, the first row of your data must have your header"
-          + " and hasHeaderRow must be set to true."
-      );
-    }
-  }
-
-  @Deprecated
-  public DelimitedParseSpec(
-      TimestampSpec timestampSpec,
-      DimensionsSpec dimensionsSpec,
-      String delimiter,
-      String listDelimiter,
-      List<String> columns
-  )
-  {
-    this(timestampSpec, dimensionsSpec, delimiter, listDelimiter, columns, false, 0);
-  }
-
-  @JsonProperty("delimiter")
-  public String getDelimiter()
-  {
-    return delimiter;
-  }
-
-  @JsonProperty("listDelimiter")
-  public String getListDelimiter()
-  {
-    return listDelimiter;
-  }
-
-  @JsonProperty("columns")
-  public List<String> getColumns()
-  {
-    return columns;
-  }
-
-  @JsonProperty
-  public boolean isHasHeaderRow()
-  {
-    return hasHeaderRow;
-  }
-
-  @JsonProperty("skipHeaderRows")
-  public int getSkipHeaderRows()
-  {
-    return skipHeaderRows;
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-    for (String columnName : usedCols) {
-      Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
-    }
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new DelimitedParser(
-        delimiter,
-        listDelimiter,
-        columns,
-        hasHeaderRow,
-        skipHeaderRows
-    );
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new DelimitedParseSpec(
-        spec,
-        getDimensionsSpec(),
-        delimiter,
-        listDelimiter,
-        columns,
-        hasHeaderRow,
-        skipHeaderRows
-    );
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new DelimitedParseSpec(
-        getTimestampSpec(),
-        spec,
-        delimiter,
-        listDelimiter,
-        columns,
-        hasHeaderRow,
-        skipHeaderRows
-    );
-  }
-
-}
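
The only difference from the CSV spec is the configurable field delimiter. A tab-separated sketch under the same assumptions as the CSV example above:

    import com.google.common.collect.ImmutableList;
    import io.druid.data.input.impl.DelimitedParseSpec;
    import io.druid.data.input.impl.DimensionsSpec;
    import io.druid.data.input.impl.TimestampSpec;

    public class TsvSpecDemo
    {
      static DelimitedParseSpec tsvSpec()
      {
        return new DelimitedParseSpec(
            new TimestampSpec("timestamp", "auto", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("page"))),
            "\t",                                 // field delimiter
            null,                                 // listDelimiter: default
            ImmutableList.of("timestamp", "page"),
            false,                                // hasHeaderRow
            0                                     // skipHeaderRows
        );
      }
    }
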
diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java
deleted file mode 100644
index b0e7c1f5f7c..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.fasterxml.jackson.annotation.JsonValue;
-import com.google.common.base.Strings;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.emitter.EmittingLogger;
-
-import java.util.Objects;
-
-/**
- */
-@PublicApi
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringDimensionSchema.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = DimensionSchema.STRING_TYPE_NAME, value = StringDimensionSchema.class),
-    @JsonSubTypes.Type(name = DimensionSchema.LONG_TYPE_NAME, value = LongDimensionSchema.class),
-    @JsonSubTypes.Type(name = DimensionSchema.FLOAT_TYPE_NAME, value = FloatDimensionSchema.class),
-    @JsonSubTypes.Type(name = DimensionSchema.DOUBLE_TYPE_NAME, value = DoubleDimensionSchema.class),
-    @JsonSubTypes.Type(name = DimensionSchema.SPATIAL_TYPE_NAME, value = NewSpatialDimensionSchema.class),
-})
-public abstract class DimensionSchema
-{
-  public static final String STRING_TYPE_NAME = "string";
-  public static final String LONG_TYPE_NAME = "long";
-  public static final String FLOAT_TYPE_NAME = "float";
-  public static final String SPATIAL_TYPE_NAME = "spatial";
-  public static final String DOUBLE_TYPE_NAME = "double";
-  private static final EmittingLogger log = new EmittingLogger(DimensionSchema.class);
-
-
-  // main druid and druid-api should really use the same ValueType enum.
-  // merge them when druid-api is merged back into the main repo
-
-  /**
-   * Should be the same as {@code io.druid.segment.column.ValueType}.
-   * TODO merge them when druid-api is merged back into the main repo
-   */
-  public enum ValueType
-  {
-    FLOAT,
-    LONG,
-    STRING,
-    DOUBLE,
-    @SuppressWarnings("unused") // used in io.druid.segment.column.ValueType
-    COMPLEX;
-
-    @JsonValue
-    @Override
-    public String toString()
-    {
-      return StringUtils.toUpperCase(this.name());
-    }
-
-    @JsonCreator
-    public static ValueType fromString(String name)
-    {
-      return valueOf(StringUtils.toUpperCase(name));
-    }
-  }
-
-  public enum MultiValueHandling
-  {
-    SORTED_ARRAY,
-    SORTED_SET,
-    ARRAY {
-      @Override
-      public boolean needSorting()
-      {
-        return false;
-      }
-    };
-
-    public boolean needSorting()
-    {
-      return true;
-    }
-
-    @Override
-    @JsonValue
-    public String toString()
-    {
-      return StringUtils.toUpperCase(name());
-    }
-
-    @JsonCreator
-    public static MultiValueHandling fromString(String name)
-    {
-      return name == null ? ofDefault() : valueOf(StringUtils.toUpperCase(name));
-    }
-
-    // this can be system configuration
-    public static MultiValueHandling ofDefault()
-    {
-      return SORTED_ARRAY;
-    }
-  }
-
-  private final String name;
-  private final MultiValueHandling multiValueHandling;
-  private final boolean createBitmapIndex;
-
-  protected DimensionSchema(String name, MultiValueHandling multiValueHandling, boolean createBitmapIndex)
-  {
-    if (Strings.isNullOrEmpty(name)) {
-      log.warn("Null or Empty Dimension found");
-    }
-    this.name = name;
-    this.multiValueHandling = multiValueHandling == null ? MultiValueHandling.ofDefault() : multiValueHandling;
-    this.createBitmapIndex = createBitmapIndex;
-  }
-
-  @JsonProperty
-  public String getName()
-  {
-    return name;
-  }
-
-  @JsonProperty
-  public MultiValueHandling getMultiValueHandling()
-  {
-    return multiValueHandling;
-  }
-
-  @JsonProperty("createBitmapIndex")
-  public boolean hasBitmapIndex()
-  {
-    return createBitmapIndex;
-  }
-
-  @JsonIgnore
-  public abstract String getTypeName();
-
-  @JsonIgnore
-  public abstract ValueType getValueType();
-
-  @Override
-  public boolean equals(final Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-    final DimensionSchema that = (DimensionSchema) o;
-    return createBitmapIndex == that.createBitmapIndex &&
-           Objects.equals(name, that.name) &&
-           Objects.equals(getTypeName(), that.getTypeName()) &&
-           Objects.equals(getValueType(), that.getValueType()) &&
-           multiValueHandling == that.multiValueHandling;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return Objects.hash(name, multiValueHandling, createBitmapIndex, getTypeName(), getValueType());
-  }
-
-  @Override
-  public String toString()
-  {
-    return "DimensionSchema{" +
-           "name='" + name + '\'' +
-           ", valueType=" + getValueType() +
-           ", typeName=" + getTypeName() +
-           ", multiValueHandling=" + multiValueHandling +
-           ", createBitmapIndex=" + createBitmapIndex +
-           '}';
-  }
-}
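
The MultiValueHandling enum above makes SORTED_ARRAY the default and lets only ARRAY opt out of sorting; fromString() is case-insensitive via StringUtils.toUpperCase. A quick sketch of that behavior against the class as defined above:

    import io.druid.data.input.impl.DimensionSchema.MultiValueHandling;

    public class MultiValueHandlingDemo
    {
      public static void main(String[] args)
      {
        // null falls back to the default, SORTED_ARRAY
        System.out.println(MultiValueHandling.fromString(null));           // SORTED_ARRAY
        // parsing is case-insensitive
        System.out.println(MultiValueHandling.fromString("sorted_set"));   // SORTED_SET
        // only ARRAY preserves input order
        System.out.println(MultiValueHandling.ARRAY.needSorting());        // false
        System.out.println(MultiValueHandling.SORTED_ARRAY.needSorting()); // true
      }
    }
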
diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java
deleted file mode 100644
index be801637031..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Function;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.parsers.ParserUtils;
-
-import javax.annotation.Nullable;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-@PublicApi
-public class DimensionsSpec
-{
-  private final List<DimensionSchema> dimensions;
-  private final Set<String> dimensionExclusions;
-  private final Map<String, DimensionSchema> dimensionSchemaMap;
-
-  public static final DimensionsSpec EMPTY = new DimensionsSpec(null, null, null);
-
-  public static List<DimensionSchema> getDefaultSchemas(List<String> dimNames)
-  {
-    return getDefaultSchemas(dimNames, DimensionSchema.MultiValueHandling.ofDefault());
-  }
-
-  public static List<DimensionSchema> getDefaultSchemas(
-      final List<String> dimNames,
-      final DimensionSchema.MultiValueHandling multiValueHandling
-  )
-  {
-    return dimNames.stream()
-                   .map(input -> new StringDimensionSchema(input, multiValueHandling, true))
-                   .collect(Collectors.toList());
-  }
-
-  public static DimensionSchema convertSpatialSchema(SpatialDimensionSchema spatialSchema)
-  {
-    return new NewSpatialDimensionSchema(spatialSchema.getDimName(), spatialSchema.getDims());
-  }
-
-  @JsonCreator
-  public DimensionsSpec(
-      @JsonProperty("dimensions") List<DimensionSchema> dimensions,
-      @JsonProperty("dimensionExclusions") List<String> dimensionExclusions,
-      @Deprecated @JsonProperty("spatialDimensions") List<SpatialDimensionSchema> spatialDimensions
-  )
-  {
-    this.dimensions = dimensions == null
-                      ? Lists.newArrayList()
-                      : Lists.newArrayList(dimensions);
-
-    this.dimensionExclusions = (dimensionExclusions == null)
-                               ? Sets.newHashSet()
-                               : Sets.newHashSet(dimensionExclusions);
-
-    List<SpatialDimensionSchema> spatialDims = (spatialDimensions == null)
-                                               ? Lists.newArrayList()
-                                               : spatialDimensions;
-
-    verify(spatialDims);
-
-    // Map for easy dimension name-based schema lookup
-    this.dimensionSchemaMap = new HashMap<>();
-    for (DimensionSchema schema : this.dimensions) {
-      dimensionSchemaMap.put(schema.getName(), schema);
-    }
-
-    for (SpatialDimensionSchema spatialSchema : spatialDims) {
-      DimensionSchema newSchema = DimensionsSpec.convertSpatialSchema(spatialSchema);
-      this.dimensions.add(newSchema);
-      dimensionSchemaMap.put(newSchema.getName(), newSchema);
-    }
-  }
-
-  public DimensionsSpec(List<DimensionSchema> dimensions)
-  {
-    this(dimensions, null, null);
-  }
-
-  @JsonProperty
-  public List<DimensionSchema> getDimensions()
-  {
-    return dimensions;
-  }
-
-  @JsonProperty
-  public Set<String> getDimensionExclusions()
-  {
-    return dimensionExclusions;
-  }
-
-  @Deprecated
-  @JsonIgnore
-  public List<SpatialDimensionSchema> getSpatialDimensions()
-  {
-    Iterable<NewSpatialDimensionSchema> filteredList = Iterables.filter(
-        dimensions, NewSpatialDimensionSchema.class
-    );
-
-    Iterable<SpatialDimensionSchema> transformedList = Iterables.transform(
-        filteredList,
-        new Function<NewSpatialDimensionSchema, SpatialDimensionSchema>()
-        {
-          @Nullable
-          @Override
-          public SpatialDimensionSchema apply(NewSpatialDimensionSchema input)
-          {
-            return new SpatialDimensionSchema(input.getName(), input.getDims());
-          }
-        }
-    );
-
-    return Lists.newArrayList(transformedList);
-  }
-
-
-  @JsonIgnore
-  public List<String> getDimensionNames()
-  {
-    return Lists.transform(
-        dimensions,
-        new Function<DimensionSchema, String>()
-        {
-          @Override
-          public String apply(DimensionSchema input)
-          {
-            return input.getName();
-          }
-        }
-    );
-  }
-
-  @PublicApi
-  public DimensionSchema getSchema(String dimension)
-  {
-    return dimensionSchemaMap.get(dimension);
-  }
-
-  public boolean hasCustomDimensions()
-  {
-    return !(dimensions == null || dimensions.isEmpty());
-  }
-
-  @PublicApi
-  public DimensionsSpec withDimensions(List<DimensionSchema> dims)
-  {
-    return new DimensionsSpec(dims, ImmutableList.copyOf(dimensionExclusions), null);
-  }
-
-  public DimensionsSpec withDimensionExclusions(Set<String> dimExs)
-  {
-    return new DimensionsSpec(
-        dimensions,
-        ImmutableList.copyOf(Sets.union(dimensionExclusions, dimExs)),
-        null
-    );
-  }
-
-  @Deprecated
-  public DimensionsSpec withSpatialDimensions(List<SpatialDimensionSchema> spatials)
-  {
-    return new DimensionsSpec(dimensions, ImmutableList.copyOf(dimensionExclusions), spatials);
-  }
-
-  private void verify(List<SpatialDimensionSchema> spatialDimensions)
-  {
-    List<String> dimNames = getDimensionNames();
-    Preconditions.checkArgument(
-        Sets.intersection(this.dimensionExclusions, Sets.newHashSet(dimNames)).isEmpty(),
-        "dimensions and dimensions exclusions cannot overlap"
-    );
-
-    ParserUtils.validateFields(dimNames);
-    ParserUtils.validateFields(dimensionExclusions);
-
-    List<String> spatialDimNames = Lists.transform(
-        spatialDimensions,
-        new Function<SpatialDimensionSchema, String>()
-        {
-          @Override
-          public String apply(SpatialDimensionSchema input)
-          {
-            return input.getDimName();
-          }
-        }
-    );
-
-    // Don't allow duplicates between main list and deprecated spatial list
-    ParserUtils.validateFields(Iterables.concat(dimNames, spatialDimNames));
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    DimensionsSpec that = (DimensionsSpec) o;
-
-    if (!dimensions.equals(that.dimensions)) {
-      return false;
-    }
-
-    return dimensionExclusions.equals(that.dimensionExclusions);
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = dimensions.hashCode();
-    result = 31 * result + dimensionExclusions.hashCode();
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "DimensionsSpec{" +
-           "dimensions=" + dimensions +
-           ", dimensionExclusions=" + dimensionExclusions +
-           '}';
-  }
-}
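
The constructor above folds deprecated spatial dimensions into the main dimension list and, via verify(), rejects any overlap between dimensions and exclusions. A small sketch using only the constructors shown above:

    import com.google.common.collect.ImmutableList;
    import io.druid.data.input.impl.DimensionsSpec;

    public class DimensionsSpecDemo
    {
      public static void main(String[] args)
      {
        DimensionsSpec spec = new DimensionsSpec(
            DimensionsSpec.getDefaultSchemas(ImmutableList.of("page", "user")),
            ImmutableList.of("robot"),  // excluded from schemaless discovery
            null                        // deprecated spatialDimensions
        );
        System.out.println(spec.getDimensionNames());    // [page, user]
        System.out.println(spec.hasCustomDimensions());  // true

        // Excluding a declared dimension trips verify():
        // new DimensionsSpec(spec.getDimensions(), ImmutableList.of("page"), null)
        //   -> IllegalArgumentException: dimensions and dimension exclusions cannot overlap
      }
    }
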
diff --git a/api/src/main/java/io/druid/data/input/impl/DoubleDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/DoubleDimensionSchema.java
deleted file mode 100644
index 6d4f924905c..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/DoubleDimensionSchema.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class DoubleDimensionSchema extends DimensionSchema
-{
-  @JsonCreator
-  public DoubleDimensionSchema(@JsonProperty("name") String name)
-  {
-    super(name, null, false);
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.DOUBLE_TYPE_NAME;
-  }
-
-  @Override
-  public ValueType getValueType()
-  {
-    return ValueType.DOUBLE;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/FileIteratingFirehose.java b/api/src/main/java/io/druid/data/input/impl/FileIteratingFirehose.java
deleted file mode 100644
index 27b9df3cf50..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/FileIteratingFirehose.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import io.druid.data.input.Firehose;
-import io.druid.data.input.InputRow;
-import io.druid.utils.Runnables;
-import org.apache.commons.io.LineIterator;
-
-import javax.annotation.Nullable;
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-/**
- */
-public class FileIteratingFirehose implements Firehose
-{
-  private final Iterator<LineIterator> lineIterators;
-  private final StringInputRowParser parser;
-
-  private LineIterator lineIterator = null;
-
-  private final Closeable closer;
-
-  public FileIteratingFirehose(
-      Iterator<LineIterator> lineIterators,
-      StringInputRowParser parser
-  )
-  {
-    this(lineIterators, parser, null);
-  }
-
-  public FileIteratingFirehose(
-      Iterator<LineIterator> lineIterators,
-      StringInputRowParser parser,
-      Closeable closer
-  )
-  {
-    this.lineIterators = lineIterators;
-    this.parser = parser;
-    this.closer = closer;
-  }
-
-  @Override
-  public boolean hasMore()
-  {
-    while ((lineIterator == null || !lineIterator.hasNext()) && lineIterators.hasNext()) {
-      lineIterator = getNextLineIterator();
-    }
-
-    return lineIterator != null && lineIterator.hasNext();
-  }
-
-  @Nullable
-  @Override
-  public InputRow nextRow()
-  {
-    if (!hasMore()) {
-      throw new NoSuchElementException();
-    }
-
-    return parser.parse(lineIterator.next());
-  }
-
-  private LineIterator getNextLineIterator()
-  {
-    if (lineIterator != null) {
-      lineIterator.close();
-    }
-
-    final LineIterator iterator = lineIterators.next();
-    parser.startFileFromBeginning();
-    return iterator;
-  }
-
-  @Override
-  public Runnable commit()
-  {
-    return Runnables.getNoopRunnable();
-  }
-
-  @Override
-  public void close() throws IOException
-  {
-    try {
-      if (lineIterator != null) {
-        lineIterator.close();
-      }
-    }
-    catch (Throwable t) {
-      try {
-        if (closer != null) {
-          closer.close();
-        }
-      }
-      catch (Exception e) {
-        t.addSuppressed(e);
-      }
-      throw t;
-    }
-    if (closer != null) {
-      closer.close();
-    }
-  }
-}
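
hasMore() above transparently advances across files, re-reading per-file headers via parser.startFileFromBeginning(), and close() releases the last open iterator. A consumption sketch; the StringInputRowParser construction is elided, since its (parseSpec, encoding) signature is assumed from elsewhere in the codebase:

    import com.google.common.collect.ImmutableList;
    import io.druid.data.input.InputRow;
    import io.druid.data.input.impl.FileIteratingFirehose;
    import io.druid.data.input.impl.StringInputRowParser;
    import org.apache.commons.io.IOUtils;
    import org.apache.commons.io.LineIterator;

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.Iterator;

    public class FirehoseLoopDemo
    {
      static void consume(StringInputRowParser parser) throws IOException
      {
        // Two in-memory "files", one line each
        Iterator<LineIterator> files = ImmutableList.of(
            IOUtils.lineIterator(new StringReader("2018-01-01T00:00:00Z,a\n")),
            IOUtils.lineIterator(new StringReader("2018-01-01T01:00:00Z,b\n"))
        ).iterator();

        try (FileIteratingFirehose firehose = new FileIteratingFirehose(files, parser)) {
          while (firehose.hasMore()) {
            InputRow row = firehose.nextRow();
            System.out.println(row);
          }
        }
      }
    }
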
diff --git a/api/src/main/java/io/druid/data/input/impl/FloatDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/FloatDimensionSchema.java
deleted file mode 100644
index 91aaf6d0dad..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/FloatDimensionSchema.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class FloatDimensionSchema extends DimensionSchema
-{
-  @JsonCreator
-  public FloatDimensionSchema(
-      @JsonProperty("name") String name
-  )
-  {
-    super(name, null, false);
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.FLOAT_TYPE_NAME;
-  }
-
-  @Override
-  @JsonIgnore
-  public ValueType getValueType()
-  {
-    return ValueType.FLOAT;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/InputRowParser.java b/api/src/main/java/io/druid/data/input/impl/InputRowParser.java
deleted file mode 100644
index 5e24888196b..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/InputRowParser.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.data.input.InputRow;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.collect.Utils;
-
-import javax.annotation.Nullable;
-import javax.validation.constraints.NotNull;
-import java.util.List;
-
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringInputRowParser.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = "string", value = StringInputRowParser.class),
-    @JsonSubTypes.Type(name = "map", value = MapInputRowParser.class),
-    @JsonSubTypes.Type(name = "noop", value = NoopInputRowParser.class)
-})
-public interface InputRowParser<T>
-{
-  /**
- * Parse an input into a list of {@link InputRow}s. The list may contain nulls for rows that should be thrown
- * away, or this method may throw {@code ParseException} if the input is unparseable. It should never return
- * null; otherwise lots of things will break.
-   */
-  @NotNull
-  default List<InputRow> parseBatch(T input)
-  {
-    return Utils.nullableListOf(parse(input));
-  }
-
-  /**
- * Parse an input into an {@link InputRow}. Returns null if this input should be thrown away, or throws
-   * {@code ParseException} if the input is unparseable.
-   */
-  @Deprecated
-  @Nullable
-  default InputRow parse(T input)
-  {
-    return null;
-  }
-
-  ParseSpec getParseSpec();
-
-  InputRowParser withParseSpec(ParseSpec parseSpec);
-}
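
The default parseBatch() above wraps the deprecated single-row parse() in a list that may contain null, which is how a parser signals "throw this row away" without violating the non-null list contract. A toy implementation, purely illustrative:

    import io.druid.data.input.InputRow;
    import io.druid.data.input.impl.InputRowParser;
    import io.druid.data.input.impl.ParseSpec;

    // Toy parser: discards every input by returning null from parse(), so the
    // default parseBatch() yields a singleton list containing null (never a null list).
    public class DiscardingParser implements InputRowParser<String>
    {
      @Override
      public InputRow parse(String input)
      {
        return null;  // "throw this row away"
      }

      @Override
      public ParseSpec getParseSpec()
      {
        return null;  // not needed for this demo
      }

      @Override
      public InputRowParser withParseSpec(ParseSpec parseSpec)
      {
        return this;
      }
    }
    // new DiscardingParser().parseBatch("anything") -> [null]
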
diff --git a/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java
deleted file mode 100644
index d8a7ae046fe..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import io.druid.java.util.common.parsers.JSONToLowerParser;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-
-/**
- * This class is only here for backwards compatibility
- */
-@Deprecated
-public class JSONLowercaseParseSpec extends ParseSpec
-{
-  private final ObjectMapper objectMapper;
-
-  @JsonCreator
-  public JSONLowercaseParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-    this.objectMapper = new ObjectMapper();
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new JSONToLowerParser(objectMapper, null, null);
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new JSONLowercaseParseSpec(spec, getDimensionsSpec());
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new JSONLowercaseParseSpec(getTimestampSpec(), spec);
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java
deleted file mode 100644
index 61419580f8e..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonParser.Feature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import io.druid.java.util.common.parsers.JSONPathParser;
-import io.druid.java.util.common.parsers.JSONPathSpec;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- */
-public class JSONParseSpec extends ParseSpec
-{
-  private final ObjectMapper objectMapper;
-  private final JSONPathSpec flattenSpec;
-  private final Map<String, Boolean> featureSpec;
-
-  @JsonCreator
-  public JSONParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("flattenSpec") JSONPathSpec flattenSpec,
-      @JsonProperty("featureSpec") Map<String, Boolean> featureSpec
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-    this.objectMapper = new ObjectMapper();
-    this.flattenSpec = flattenSpec != null ? flattenSpec : JSONPathSpec.DEFAULT;
-    this.featureSpec = (featureSpec == null) ? new HashMap<String, Boolean>() : featureSpec;
-    for (Map.Entry<String, Boolean> entry : this.featureSpec.entrySet()) {
-      Feature feature = Feature.valueOf(entry.getKey());
-      objectMapper.configure(feature, entry.getValue());
-    }
-  }
-
-  @Deprecated
-  public JSONParseSpec(TimestampSpec ts, DimensionsSpec dims)
-  {
-    this(ts, dims, null, null);
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new JSONPathParser(flattenSpec, objectMapper);
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new JSONParseSpec(spec, getDimensionsSpec(), getFlattenSpec(), getFeatureSpec());
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new JSONParseSpec(getTimestampSpec(), spec, getFlattenSpec(), getFeatureSpec());
-  }
-
-  @JsonProperty
-  public JSONPathSpec getFlattenSpec()
-  {
-    return flattenSpec;
-  }
-
-  @JsonProperty
-  public Map<String, Boolean> getFeatureSpec()
-  {
-    return featureSpec;
-  }
-
-  @Override
-  public boolean equals(final Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-    if (!super.equals(o)) {
-      return false;
-    }
-    final JSONParseSpec that = (JSONParseSpec) o;
-    return Objects.equals(flattenSpec, that.flattenSpec) &&
-           Objects.equals(featureSpec, that.featureSpec);
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return Objects.hash(super.hashCode(), flattenSpec, featureSpec);
-  }
-
-  @Override
-  public String toString()
-  {
-    return "JSONParseSpec{" +
-           "timestampSpec=" + getTimestampSpec() +
-           ", dimensionsSpec=" + getDimensionsSpec() +
-           ", flattenSpec=" + flattenSpec +
-           ", featureSpec=" + featureSpec +
-           '}';
-  }
-}
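
featureSpec keys above are resolved with Feature.valueOf(), so they must exactly match com.fasterxml.jackson.core.JsonParser.Feature constant names. A sketch that enables comment-tolerant JSON parsing; the TimestampSpec signature is assumed as in the earlier examples:

    import com.google.common.collect.ImmutableMap;
    import io.druid.data.input.impl.DimensionsSpec;
    import io.druid.data.input.impl.JSONParseSpec;
    import io.druid.data.input.impl.TimestampSpec;

    public class JsonSpecDemo
    {
      static JSONParseSpec commentTolerantSpec()
      {
        return new JSONParseSpec(
            new TimestampSpec("timestamp", "auto", null),
            DimensionsSpec.EMPTY,                    // schemaless dimension discovery
            null,                                    // flattenSpec: JSONPathSpec.DEFAULT
            ImmutableMap.of("ALLOW_COMMENTS", true)  // must be a JsonParser.Feature name
        );
      }
    }
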
diff --git a/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java
deleted file mode 100644
index 119fda6874e..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JacksonInject;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import io.druid.java.util.common.parsers.JavaScriptParser;
-import io.druid.java.util.common.parsers.Parser;
-import io.druid.js.JavaScriptConfig;
-
-import java.util.List;
-
-/**
- */
-public class JavaScriptParseSpec extends ParseSpec
-{
-  private final String function;
-  private final JavaScriptConfig config;
-
-  // This variable is lazily initialized to avoid unnecessary JavaScript compilation during JSON serde
-  private JavaScriptParser parser;
-
-  @JsonCreator
-  public JavaScriptParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("function") String function,
-      @JacksonInject JavaScriptConfig config
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-
-    this.function = function;
-    this.config = config;
-  }
-
-  @JsonProperty("function")
-  public String getFunction()
-  {
-    return function;
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    // JavaScript configuration should be checked when it's actually used because someone might still want Druid
-    // nodes to be able to deserialize JavaScript-based objects even though JavaScript is disabled.
-    Preconditions.checkState(config.isEnabled(), "JavaScript is disabled");
-    parser = parser == null ? new JavaScriptParser(function) : parser;
-    return parser;
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new JavaScriptParseSpec(spec, getDimensionsSpec(), function, config);
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new JavaScriptParseSpec(getTimestampSpec(), spec, function, config);
-  }
-
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/LongDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/LongDimensionSchema.java
deleted file mode 100644
index 58f44af883b..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/LongDimensionSchema.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class LongDimensionSchema extends DimensionSchema
-{
-  @JsonCreator
-  public LongDimensionSchema(
-      @JsonProperty("name") String name
-  )
-  {
-    super(name, null, false);
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.LONG_TYPE_NAME;
-  }
-
-  @Override
-  @JsonIgnore
-  public ValueType getValueType()
-  {
-    return ValueType.LONG;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java
deleted file mode 100644
index 90de90400cd..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import io.druid.data.input.InputRow;
-import io.druid.data.input.MapBasedInputRow;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.parsers.ParseException;
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-
-public class MapInputRowParser implements InputRowParser<Map<String, Object>>
-{
-  private final ParseSpec parseSpec;
-
-  @JsonCreator
-  public MapInputRowParser(
-      @JsonProperty("parseSpec") ParseSpec parseSpec
-  )
-  {
-    this.parseSpec = parseSpec;
-  }
-
-  @Override
-  public List<InputRow> parseBatch(Map<String, Object> theMap)
-  {
-    final List<String> dimensions = parseSpec.getDimensionsSpec().hasCustomDimensions()
-                                    ? parseSpec.getDimensionsSpec().getDimensionNames()
-                                    : Lists.newArrayList(
-                                        Sets.difference(
-                                            theMap.keySet(),
-                                            parseSpec.getDimensionsSpec()
-                                                     .getDimensionExclusions()
-                                        )
-                                    );
-
-    final DateTime timestamp;
-    try {
-      timestamp = parseSpec.getTimestampSpec().extractTimestamp(theMap);
-      if (timestamp == null) {
-        final String input = theMap.toString();
-        throw new NullPointerException(
-            StringUtils.format(
-                "Null timestamp in input: %s",
-                input.length() < 100 ? input : input.substring(0, 100) + "..."
-            )
-        );
-      }
-    }
-    catch (Exception e) {
-      throw new ParseException(e, "Unparseable timestamp found! Event: %s", theMap);
-    }
-
-    return ImmutableList.of(new MapBasedInputRow(timestamp.getMillis(), dimensions, theMap));
-  }
-
-  @JsonProperty
-  @Override
-  public ParseSpec getParseSpec()
-  {
-    return parseSpec;
-  }
-
-  @Override
-  public InputRowParser withParseSpec(ParseSpec parseSpec)
-  {
-    return new MapInputRowParser(parseSpec);
-  }
-}
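
When no dimensions are declared, parseBatch() above treats every map key except the exclusions as a dimension. A sketch; the TimeAndDimsParseSpec(timestampSpec, dimensionsSpec) and TimestampSpec signatures are assumed from the rest of the codebase:

    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.ImmutableMap;
    import io.druid.data.input.InputRow;
    import io.druid.data.input.impl.DimensionsSpec;
    import io.druid.data.input.impl.MapInputRowParser;
    import io.druid.data.input.impl.TimeAndDimsParseSpec;
    import io.druid.data.input.impl.TimestampSpec;

    import java.util.List;

    public class MapParserDemo
    {
      public static void main(String[] args)
      {
        MapInputRowParser parser = new MapInputRowParser(
            new TimeAndDimsParseSpec(
                new TimestampSpec("t", "auto", null),
                // no dimensions declared; exclude the timestamp key from discovery
                new DimensionsSpec(null, ImmutableList.of("t"), null)
            )
        );
        List<InputRow> rows = parser.parseBatch(
            ImmutableMap.<String, Object>of("t", "2018-01-01T00:00:00Z", "page", "home")
        );
        System.out.println(rows.get(0).getDimensions());  // [page]
      }
    }
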
diff --git a/api/src/main/java/io/druid/data/input/impl/NewSpatialDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/NewSpatialDimensionSchema.java
deleted file mode 100644
index 9bae7e43707..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/NewSpatialDimensionSchema.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.List;
-
-/**
- * NOTE: 
- * This class should be deprecated after Druid supports configurable index types on dimensions.
- * When that exists, this should be the implementation: https://github.com/druid-io/druid/issues/2622
- * 
- * This is a stop-gap solution to consolidate the dimension specs and remove the separate spatial 
- * section in DimensionsSpec.
- */
-public class NewSpatialDimensionSchema extends DimensionSchema
-{
-  private final List<String> dims;
-
-  @JsonCreator
-  public NewSpatialDimensionSchema(
-      @JsonProperty("name") String name,
-      @JsonProperty("dims") List<String> dims
-  )
-  {
-    super(name, null, true);
-    this.dims = dims;
-  }
-
-  @JsonProperty
-  public List<String> getDims()
-  {
-    return dims;
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.SPATIAL_TYPE_NAME;
-  }
-
-  @Override
-  @JsonIgnore
-  public ValueType getValueType()
-  {
-    return ValueType.STRING;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    NewSpatialDimensionSchema that = (NewSpatialDimensionSchema) o;
-
-    return dims != null ? dims.equals(that.dims) : that.dims == null;
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return dims != null ? dims.hashCode() : 0;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/NoopInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/NoopInputRowParser.java
deleted file mode 100644
index fa86df7339d..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/NoopInputRowParser.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.ImmutableList;
-import io.druid.data.input.InputRow;
-
-import java.util.List;
-
-/**
- */
-public class NoopInputRowParser implements InputRowParser<InputRow>
-{
-  private final ParseSpec parseSpec;
-
-  @JsonCreator
-  public NoopInputRowParser(
-      @JsonProperty("parseSpec") ParseSpec parseSpec
-  )
-  {
-    this.parseSpec = parseSpec != null ? parseSpec : new TimeAndDimsParseSpec(null, null);
-  }
-
-  @Override
-  public List<InputRow> parseBatch(InputRow input)
-  {
-    return ImmutableList.of(input);
-  }
-
-  @JsonProperty
-  @Override
-  public ParseSpec getParseSpec()
-  {
-    return parseSpec;
-  }
-
-  @Override
-  public InputRowParser withParseSpec(ParseSpec parseSpec)
-  {
-    return new NoopInputRowParser(parseSpec);
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    NoopInputRowParser that = (NoopInputRowParser) o;
-
-    return parseSpec.equals(that.parseSpec);
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return parseSpec.hashCode();
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/ParseSpec.java b/api/src/main/java/io/druid/data/input/impl/ParseSpec.java
deleted file mode 100644
index 114aa3c6f40..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/ParseSpec.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.google.common.base.Preconditions;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "format", defaultImpl = DelimitedParseSpec.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = "json", value = JSONParseSpec.class),
-    @JsonSubTypes.Type(name = "csv", value = CSVParseSpec.class),
-    @JsonSubTypes.Type(name = "tsv", value = DelimitedParseSpec.class),
-    @JsonSubTypes.Type(name = "jsonLowercase", value = JSONLowercaseParseSpec.class),
-    @JsonSubTypes.Type(name = "timeAndDims", value = TimeAndDimsParseSpec.class),
-    @JsonSubTypes.Type(name = "regex", value = RegexParseSpec.class),
-    @JsonSubTypes.Type(name = "javascript", value = JavaScriptParseSpec.class)
-
-})
-public abstract class ParseSpec
-{
-  private final TimestampSpec timestampSpec;
-  private final DimensionsSpec dimensionsSpec;
-
-  protected ParseSpec(TimestampSpec timestampSpec, DimensionsSpec dimensionsSpec)
-  {
-    this.timestampSpec = Preconditions.checkNotNull(timestampSpec, "parseSpec requires timestampSpec");
-    this.dimensionsSpec = Preconditions.checkNotNull(dimensionsSpec, "parseSpec requires dimensionsSpec");
-  }
-
-  @JsonProperty
-  public TimestampSpec getTimestampSpec()
-  {
-    return timestampSpec;
-  }
-
-  @JsonProperty
-  public DimensionsSpec getDimensionsSpec()
-  {
-    return dimensionsSpec;
-  }
-
-  @PublicApi
-  public void verify(List<String> usedCols)
-  {
-    // do nothing
-  }
-
-  public Parser<String, Object> makeParser()
-  {
-    return null;
-  }
-
-  @PublicApi
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    ParseSpec parseSpec = (ParseSpec) o;
-
-    if (timestampSpec != null ? !timestampSpec.equals(parseSpec.timestampSpec) : parseSpec.timestampSpec != null) {
-      return false;
-    }
-    return dimensionsSpec != null
-           ? dimensionsSpec.equals(parseSpec.dimensionsSpec)
-           : parseSpec.dimensionsSpec == null;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = timestampSpec != null ? timestampSpec.hashCode() : 0;
-    result = 31 * result + (dimensionsSpec != null ? dimensionsSpec.hashCode() : 0);
-    return result;
-  }
-}
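
As the @JsonTypeInfo/@JsonSubTypes annotations above show, the "format" field selects the ParseSpec subtype at deserialization time, with DelimitedParseSpec as the fallback. A hedged sketch (field names follow the @JsonProperty annotations in these classes; it assumes a plain Jackson ObjectMapper can resolve the annotated creators, and uses the post-rename package):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.druid.data.input.impl.ParseSpec;

    class ParseSpecDeserializationExample
    {
      public static void main(String[] args) throws Exception
      {
        // "format": "timeAndDims" resolves to TimeAndDimsParseSpec; omitting
        // "format" would fall back to DelimitedParseSpec (the defaultImpl).
        // The omitted dimensionsSpec defaults inside the constructor.
        String json = "{\"format\": \"timeAndDims\","
                      + " \"timestampSpec\": {\"column\": \"ts\", \"format\": \"auto\"}}";
        ParseSpec spec = new ObjectMapper().readValue(json, ParseSpec.class);
        System.out.println(spec.getTimestampSpec().getTimestampColumn()); // ts
      }
    }
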
diff --git a/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java b/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java
deleted file mode 100644
index 5c82975b8b7..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import io.druid.java.util.common.parsers.Parser;
-import io.druid.java.util.common.parsers.RegexParser;
-
-import java.util.List;
-
-/**
- */
-public class RegexParseSpec extends ParseSpec
-{
-  private final String listDelimiter;
-  private final List<String> columns;
-  private final String pattern;
-
-  @JsonCreator
-  public RegexParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("listDelimiter") String listDelimiter,
-      @JsonProperty("columns") List<String> columns,
-      @JsonProperty("pattern") String pattern
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-
-    this.listDelimiter = listDelimiter;
-    this.columns = columns;
-    this.pattern = pattern;
-
-    verify(dimensionsSpec.getDimensionNames());
-  }
-
-  @JsonProperty
-  public String getListDelimiter()
-  {
-    return listDelimiter;
-  }
-
-  @JsonProperty("pattern")
-  public String getPattern()
-  {
-    return pattern;
-  }
-
-  @JsonProperty
-  public List<String> getColumns()
-  {
-    return columns;
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-    if (columns != null) {
-      for (String columnName : usedCols) {
-        Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
-      }
-    }
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    if (columns == null) {
-      return new RegexParser(pattern, Optional.fromNullable(listDelimiter));
-    }
-    return new RegexParser(pattern, Optional.fromNullable(listDelimiter), columns);
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new RegexParseSpec(spec, getDimensionsSpec(), listDelimiter, columns, pattern);
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new RegexParseSpec(getTimestampSpec(), spec, listDelimiter, columns, pattern);
-  }
-
-}
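
A usage sketch for the class above: build a RegexParseSpec whose capture groups map onto the declared columns, then parse a line into a column-to-value map. Assumptions: RegexParser assigns capture group i to columns[i-1], DimensionsSpec treats null dimension lists as empty (as the TimeAndDimsParseSpec default suggests), and the post-rename package applies.

    import org.apache.druid.data.input.impl.DimensionsSpec;
    import org.apache.druid.data.input.impl.RegexParseSpec;
    import org.apache.druid.data.input.impl.TimestampSpec;
    import org.apache.druid.java.util.common.parsers.Parser;

    import java.util.Arrays;
    import java.util.Map;

    class RegexParseSpecExample
    {
      public static void main(String[] args)
      {
        RegexParseSpec spec = new RegexParseSpec(
            new TimestampSpec("ts", "auto", null),
            new DimensionsSpec(null, null, null),
            null,                              // no list delimiter
            Arrays.asList("ts", "page"),       // one name per capture group
            "^(\\S+)\\s+(\\S+)$"
        );
        Parser<String, Object> parser = spec.makeParser();
        Map<String, Object> row = parser.parseToMap("2018-08-30T00:00:00Z Main_Page");
        System.out.println(row); // e.g. {ts=2018-08-30T00:00:00Z, page=Main_Page}
      }
    }
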
diff --git a/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java
deleted file mode 100644
index 32f96cc6164..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.List;
-
-/**
- */
-@Deprecated
-public class SpatialDimensionSchema
-{
-  private final String dimName;
-  private final List<String> dims;
-
-  @JsonCreator
-  public SpatialDimensionSchema(
-      @JsonProperty("dimName") String dimName,
-      @JsonProperty("dims") List<String> dims
-  )
-  {
-    this.dimName = dimName;
-    this.dims = dims;
-  }
-
-  @JsonProperty
-  public String getDimName()
-  {
-    return dimName;
-  }
-
-  @JsonProperty
-  public List<String> getDims()
-  {
-    return dims;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    SpatialDimensionSchema that = (SpatialDimensionSchema) o;
-
-    if (dimName != null ? !dimName.equals(that.dimName) : that.dimName != null) {
-      return false;
-    }
-    return dims != null ? dims.equals(that.dims) : that.dims == null;
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = dimName != null ? dimName.hashCode() : 0;
-    result = 31 * result + (dims != null ? dims.hashCode() : 0);
-    return result;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/SqlFirehose.java b/api/src/main/java/io/druid/data/input/impl/SqlFirehose.java
deleted file mode 100644
index a678329144d..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/SqlFirehose.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package io.druid.data.input.impl;
-
-import com.google.common.collect.Iterators;
-import io.druid.data.input.Firehose;
-import io.druid.data.input.InputRow;
-import io.druid.data.input.impl.prefetch.JsonIterator;
-import io.druid.java.util.common.io.Closer;
-import io.druid.utils.Runnables;
-
-import javax.annotation.Nullable;
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-
-public class SqlFirehose implements Firehose
-{
-  private final Iterator<JsonIterator<Map<String, Object>>> resultIterator;
-  private final InputRowParser parser;
-  private final Closeable closer;
-  private JsonIterator<Map<String, Object>> lineIterator = null;
-
-  public SqlFirehose(
-      Iterator<JsonIterator<Map<String, Object>>> lineIterators,
-      InputRowParser<Map<String, Object>> parser,
-      Closeable closer
-  )
-  {
-    this.resultIterator = lineIterators;
-    this.parser = parser;
-    this.closer = closer;
-  }
-
-  @Override
-  public boolean hasMore()
-  {
-    while ((lineIterator == null || !lineIterator.hasNext()) && resultIterator.hasNext()) {
-      lineIterator = getNextLineIterator();
-    }
-
-    return lineIterator != null && lineIterator.hasNext();
-  }
-
-  @Nullable
-  @Override
-  public InputRow nextRow()
-  {
-    Map<String, Object> mapToParse = lineIterator.next();
-    return (InputRow) Iterators.getOnlyElement(parser.parseBatch(mapToParse).iterator());
-  }
-
-  private JsonIterator<Map<String, Object>> getNextLineIterator()
-  {
-    // Any previous iterator is exhausted at this point, so simply advance.
-    return resultIterator.next();
-  }
-
-  @Override
-  public Runnable commit()
-  {
-    return Runnables.getNoopRunnable();
-  }
-
-  @Override
-  public void close() throws IOException
-  {
-    Closer firehoseCloser = Closer.create();
-    if (lineIterator != null) {
-      firehoseCloser.register(lineIterator);
-    }
-    firehoseCloser.register(closer);
-    firehoseCloser.close();
-  }
-}
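
The hasMore()/nextRow() contract above is the standard Firehose consumption pattern. A generic draining sketch (error handling elided; post-rename package assumed):

    import org.apache.druid.data.input.Firehose;
    import org.apache.druid.data.input.InputRow;

    import java.io.IOException;

    class FirehoseDrainExample
    {
      static void drain(Firehose firehose) throws IOException
      {
        try {
          while (firehose.hasMore()) {
            InputRow row = firehose.nextRow(); // may be null for unparseable rows
            System.out.println(row);
          }
          firehose.commit().run();             // a no-op Runnable for SqlFirehose
        }
        finally {
          firehose.close();
        }
      }
    }
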
diff --git a/api/src/main/java/io/druid/data/input/impl/StringDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/StringDimensionSchema.java
deleted file mode 100644
index 7538b14b343..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/StringDimensionSchema.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class StringDimensionSchema extends DimensionSchema
-{
-  private static final boolean DEFAULT_CREATE_BITMAP_INDEX = true;
-
-  @JsonCreator
-  public static StringDimensionSchema create(String name)
-  {
-    return new StringDimensionSchema(name);
-  }
-
-  @JsonCreator
-  public StringDimensionSchema(
-      @JsonProperty("name") String name,
-      @JsonProperty("multiValueHandling") MultiValueHandling multiValueHandling,
-      @JsonProperty("createBitmapIndex") Boolean createBitmapIndex
-  )
-  {
-    super(name, multiValueHandling, createBitmapIndex == null ? DEFAULT_CREATE_BITMAP_INDEX : createBitmapIndex);
-  }
-
-  public StringDimensionSchema(String name)
-  {
-    this(name, null, DEFAULT_CREATE_BITMAP_INDEX);
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.STRING_TYPE_NAME;
-  }
-
-  @Override
-  @JsonIgnore
-  public ValueType getValueType()
-  {
-    return ValueType.STRING;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java
deleted file mode 100644
index 3a646ead048..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Iterators;
-import io.druid.data.input.ByteBufferInputRowParser;
-import io.druid.data.input.InputRow;
-import io.druid.java.util.common.collect.Utils;
-import io.druid.java.util.common.parsers.ParseException;
-import io.druid.java.util.common.parsers.Parser;
-
-import javax.annotation.Nullable;
-import java.nio.ByteBuffer;
-import java.nio.CharBuffer;
-import java.nio.charset.Charset;
-import java.nio.charset.CoderResult;
-import java.nio.charset.CodingErrorAction;
-import java.nio.charset.StandardCharsets;
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-public class StringInputRowParser implements ByteBufferInputRowParser
-{
-  private static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
-
-  private final ParseSpec parseSpec;
-  private final MapInputRowParser mapParser;
-  private final Charset charset;
-
-  private Parser<String, Object> parser;
-  private CharBuffer chars;
-
-  @JsonCreator
-  public StringInputRowParser(
-      @JsonProperty("parseSpec") ParseSpec parseSpec,
-      @JsonProperty("encoding") String encoding
-  )
-  {
-    this.parseSpec = Preconditions.checkNotNull(parseSpec, "parseSpec");
-    this.mapParser = new MapInputRowParser(parseSpec);
-
-    if (encoding != null) {
-      this.charset = Charset.forName(encoding);
-    } else {
-      this.charset = DEFAULT_CHARSET;
-    }
-  }
-
-  @Deprecated
-  public StringInputRowParser(ParseSpec parseSpec)
-  {
-    this(parseSpec, null);
-  }
-
-  @Override
-  public List<InputRow> parseBatch(ByteBuffer input)
-  {
-    return Utils.nullableListOf(parseMap(buildStringKeyMap(input)));
-  }
-
-  @JsonProperty
-  @Override
-  public ParseSpec getParseSpec()
-  {
-    return parseSpec;
-  }
-
-  @JsonProperty
-  public String getEncoding()
-  {
-    return charset.name();
-  }
-
-  @Override
-  public StringInputRowParser withParseSpec(ParseSpec parseSpec)
-  {
-    return new StringInputRowParser(parseSpec, getEncoding());
-  }
-
-  private Map<String, Object> buildStringKeyMap(ByteBuffer input)
-  {
-    int payloadSize = input.remaining();
-
-    if (chars == null || chars.remaining() < payloadSize) {
-      chars = CharBuffer.allocate(payloadSize);
-    }
-
-    final CoderResult coderResult = charset.newDecoder()
-                                           .onMalformedInput(CodingErrorAction.REPLACE)
-                                           .onUnmappableCharacter(CodingErrorAction.REPLACE)
-                                           .decode(input, chars, true);
-
-    Map<String, Object> theMap;
-    if (coderResult.isUnderflow()) {
-      chars.flip();
-      try {
-        theMap = parseString(chars.toString());
-      }
-      finally {
-        chars.clear();
-      }
-    } else {
-      throw new ParseException("Failed with CoderResult[%s]", coderResult);
-    }
-    return theMap;
-  }
-
-  public void initializeParser()
-  {
-    if (parser == null) {
-      // parser should be created when it is really used to avoid unnecessary initialization of the underlying
-      // parseSpec.
-      parser = parseSpec.makeParser();
-    }
-  }
-
-  public void startFileFromBeginning()
-  {
-    initializeParser();
-    parser.startFileFromBeginning();
-  }
-
-  @Nullable
-  public InputRow parse(@Nullable String input)
-  {
-    return parseMap(parseString(input));
-  }
-
-  @Nullable
-  private Map<String, Object> parseString(@Nullable String inputString)
-  {
-    initializeParser();
-    return parser.parseToMap(inputString);
-  }
-
-  @Nullable
-  private InputRow parseMap(@Nullable Map<String, Object> theMap)
-  {
-    // A null map means the line was a header row that the parser was configured to skip
-    if (theMap == null) {
-      return null;
-    }
-    return Iterators.getOnlyElement(mapParser.parseBatch(theMap).iterator());
-  }
-}
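
A sketch tying StringInputRowParser to a ParseSpec: the parser decodes the buffer with the configured charset (UTF-8 by default) and delegates row construction to MapInputRowParser. Any concrete ParseSpec works, e.g. the RegexParseSpec sketch earlier; the post-rename package is assumed.

    import org.apache.druid.data.input.InputRow;
    import org.apache.druid.data.input.impl.ParseSpec;
    import org.apache.druid.data.input.impl.StringInputRowParser;

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import java.util.List;

    class StringParserExample
    {
      static List<InputRow> parseLine(ParseSpec spec, String line)
      {
        StringInputRowParser parser = new StringInputRowParser(spec, "UTF-8");
        // parseBatch decodes the bytes, parses them to a map, and builds the row.
        return parser.parseBatch(ByteBuffer.wrap(line.getBytes(StandardCharsets.UTF_8)));
      }
    }
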
diff --git a/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java b/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java
deleted file mode 100644
index 081723c4428..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-public class TimeAndDimsParseSpec extends ParseSpec
-{
-  @JsonCreator
-  public TimeAndDimsParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec
-  )
-  {
-    super(
-        timestampSpec != null ? timestampSpec : new TimestampSpec(null, null, null),
-        dimensionsSpec != null ? dimensionsSpec : new DimensionsSpec(null, null, null)
-    );
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new Parser<String, Object>()
-    {
-      @Override
-      public Map<String, Object> parseToMap(String input)
-      {
-        throw new UnsupportedOperationException("not supported");
-      }
-
-      @Override
-      public void setFieldNames(Iterable<String> fieldNames)
-      {
-        throw new UnsupportedOperationException("not supported");
-      }
-
-      @Override
-      public List<String> getFieldNames()
-      {
-        throw new UnsupportedOperationException("not supported");
-      }
-    };
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new TimeAndDimsParseSpec(spec, getDimensionsSpec());
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new TimeAndDimsParseSpec(getTimestampSpec(), spec);
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java b/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java
deleted file mode 100644
index e93d00fea26..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Function;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.parsers.TimestampParser;
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- */
-@PublicApi
-public class TimestampSpec
-{
-  private static class ParseCtx
-  {
-    Object lastTimeObject = null;
-    DateTime lastDateTime = null;
-  }
-
-  private static final String DEFAULT_COLUMN = "timestamp";
-  private static final String DEFAULT_FORMAT = "auto";
-  private static final DateTime DEFAULT_MISSING_VALUE = null;
-
-  private final String timestampColumn;
-  private final String timestampFormat;
-  // this value should never be set for production data
-  private final DateTime missingValue;
-  /** This field is a derivative of {@link #timestampFormat}; not checked in {@link #equals} and {@link #hashCode} */
-  private final Function<Object, DateTime> timestampConverter;
-
-  // remember last value parsed
-  private static final ThreadLocal<ParseCtx> parseCtx = ThreadLocal.withInitial(ParseCtx::new);
-
-  @JsonCreator
-  public TimestampSpec(
-      @JsonProperty("column") String timestampColumn,
-      @JsonProperty("format") String format,
-      // this value should never be set for production data
-      @JsonProperty("missingValue") DateTime missingValue
-  )
-  {
-    this.timestampColumn = (timestampColumn == null) ? DEFAULT_COLUMN : timestampColumn;
-    this.timestampFormat = format == null ? DEFAULT_FORMAT : format;
-    this.timestampConverter = TimestampParser.createObjectTimestampParser(timestampFormat);
-    this.missingValue = missingValue == null
-                        ? DEFAULT_MISSING_VALUE
-                        : missingValue;
-  }
-
-  @JsonProperty("column")
-  public String getTimestampColumn()
-  {
-    return timestampColumn;
-  }
-
-  @JsonProperty("format")
-  public String getTimestampFormat()
-  {
-    return timestampFormat;
-  }
-
-  @JsonProperty("missingValue")
-  public DateTime getMissingValue()
-  {
-    return missingValue;
-  }
-
-  public DateTime extractTimestamp(Map<String, Object> input)
-  {
-    return parseDateTime(input.get(timestampColumn));
-  }
-
-  public DateTime parseDateTime(Object input)
-  {
-    DateTime extracted = missingValue;
-    if (input != null) {
-      ParseCtx ctx = parseCtx.get();
-      // Check if the input is equal to the last input, so we don't need to parse it again
-      if (input.equals(ctx.lastTimeObject)) {
-        extracted = ctx.lastDateTime;
-      } else {
-        extracted = timestampConverter.apply(input);
-        ParseCtx newCtx = new ParseCtx();
-        newCtx.lastTimeObject = input;
-        newCtx.lastDateTime = extracted;
-        parseCtx.set(newCtx);
-      }
-    }
-    return extracted;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    TimestampSpec that = (TimestampSpec) o;
-
-    if (!timestampColumn.equals(that.timestampColumn)) {
-      return false;
-    }
-    if (!timestampFormat.equals(that.timestampFormat)) {
-      return false;
-    }
-    return missingValue != null ? missingValue.equals(that.missingValue) : that.missingValue == null;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = timestampColumn.hashCode();
-    result = 31 * result + timestampFormat.hashCode();
-    result = 31 * result + (missingValue != null ? missingValue.hashCode() : 0);
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "TimestampSpec{" +
-           "timestampColumn='" + timestampColumn + '\'' +
-           ", timestampFormat='" + timestampFormat + '\'' +
-           ", missingValue=" + missingValue +
-           '}';
-  }
-
-  // Simple merge strategy on TimestampSpec: returns the common spec if all non-null entries are equal,
-  // or null otherwise. This can be improved in the future but is good enough for most use cases.
-  public static TimestampSpec mergeTimestampSpec(List<TimestampSpec> toMerge)
-  {
-    if (toMerge == null || toMerge.isEmpty()) {
-      return null;
-    }
-
-    TimestampSpec result = toMerge.get(0);
-    for (int i = 1; i < toMerge.size(); i++) {
-      if (toMerge.get(i) == null) {
-        continue;
-      }
-      if (!Objects.equals(result, toMerge.get(i))) {
-        return null;
-      }
-    }
-
-    return result;
-  }
-}
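
Two behaviors worth illustrating from the class above: the per-thread parse cache in parseDateTime is invisible to callers, and mergeTimestampSpec collapses a list to one spec only when all entries are equal. A small sketch (assuming the "auto" format accepts ISO-8601 strings, which TimestampParser conventionally does):

    import org.apache.druid.data.input.impl.TimestampSpec;
    import org.joda.time.DateTime;

    import java.util.Arrays;

    class TimestampSpecExample
    {
      public static void main(String[] args)
      {
        TimestampSpec spec = new TimestampSpec("ts", "auto", null);
        DateTime t = spec.parseDateTime("2018-08-30T16:56:31Z");
        System.out.println(t);

        // Equal specs merge to an equal spec...
        System.out.println(TimestampSpec.mergeTimestampSpec(
            Arrays.asList(spec, new TimestampSpec("ts", "auto", null)))); // non-null
        // ...while differing specs merge to null.
        System.out.println(TimestampSpec.mergeTimestampSpec(
            Arrays.asList(spec, new TimestampSpec("time", "auto", null)))); // null
      }
    }
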
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/CacheManager.java b/api/src/main/java/io/druid/data/input/impl/prefetch/CacheManager.java
deleted file mode 100644
index 3a3031aaeb4..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/CacheManager.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.google.common.annotations.VisibleForTesting;
-import io.druid.java.util.common.ISE;
-import io.druid.java.util.common.logger.Logger;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A class managing cached files used by {@link PrefetchableTextFilesFirehoseFactory}.
- */
-class CacheManager<T>
-{
-  private static final Logger LOG = new Logger(CacheManager.class);
-
-  // A rough upper bound on the total size of cached objects; the actual cached size can exceed it. The reason is that
-  // our current client implementations for cloud storages like S3 don't support range scans yet, so we must download
-  // each file in full. We could keep the cached size under this limit by estimating the after-fetch size, but then we
-  // would have to handle files that can never be fetched because they are too large, which would complicate the
-  // implementation.
-  private final long maxCacheCapacityBytes;
-
-  private final List<FetchedFile<T>> files = new ArrayList<>();
-
-  private long totalCachedBytes;
-
-  CacheManager(long maxCacheCapacityBytes)
-  {
-    this.maxCacheCapacityBytes = maxCacheCapacityBytes;
-  }
-
-  boolean isEnabled()
-  {
-    return maxCacheCapacityBytes > 0;
-  }
-
-  boolean cacheable()
-  {
-    // maxCacheCapacityBytes is a rough limit, so if totalCachedBytes is larger than it, no more caching is
-    // allowed.
-    return totalCachedBytes < maxCacheCapacityBytes;
-  }
-
-  FetchedFile<T> cache(FetchedFile<T> fetchedFile)
-  {
-    if (!cacheable()) {
-      throw new ISE(
-          "Cache space is full. totalCachedBytes[%d], maxCacheCapacityBytes[%d]",
-          totalCachedBytes,
-          maxCacheCapacityBytes
-      );
-    }
-
-    final FetchedFile<T> cachedFile = fetchedFile.cache();
-    files.add(cachedFile);
-    totalCachedBytes += cachedFile.length();
-
-    LOG.info("Object[%s] is cached. Current cached bytes is [%d]", cachedFile.getObject(), totalCachedBytes);
-    return cachedFile;
-  }
-
-  List<FetchedFile<T>> getFiles()
-  {
-    return files;
-  }
-
-  @VisibleForTesting
-  long getTotalCachedBytes()
-  {
-    return totalCachedBytes;
-  }
-
-  long getMaxCacheCapacityBytes()
-  {
-    return maxCacheCapacityBytes;
-  }
-}
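
To make the "rough limit" above concrete: cacheable() admits a file whenever the running total is still below the cap, so the final total can overshoot by up to one file's length. An illustrative restatement in plain Java (not the Druid API):

    class RoughCapIllustration
    {
      public static void main(String[] args)
      {
        long maxCacheCapacityBytes = 100;
        long totalCachedBytes = 0;
        for (long fileLength : new long[]{60, 60, 60}) {
          if (totalCachedBytes < maxCacheCapacityBytes) { // cacheable()
            totalCachedBytes += fileLength;               // cache()
          }
        }
        // Admitted 60 and 60 (total 120 > 100); the third file was rejected.
        System.out.println(totalCachedBytes);
      }
    }
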
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/FetchedFile.java b/api/src/main/java/io/druid/data/input/impl/prefetch/FetchedFile.java
deleted file mode 100644
index c788371ccfe..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/FetchedFile.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import java.io.Closeable;
-import java.io.File;
-
-/**
- * A class containing meta information about fetched objects.  This class is used by {@link Fetcher}.
- */
-class FetchedFile<T>
-{
-  // Original object
-  private final T object;
-  // Fetched file stored in local disk
-  private final File file;
-  // Closer which is called when the file is not needed anymore. Usually this deletes the file except for cached files.
-  private final Closeable resourceCloser;
-
-  FetchedFile(T object, File file, Closeable resourceCloser)
-  {
-    this.object = object;
-    this.file = file;
-    this.resourceCloser = resourceCloser;
-  }
-
-  long length()
-  {
-    return file.length();
-  }
-
-  T getObject()
-  {
-    return object;
-  }
-
-  File getFile()
-  {
-    return file;
-  }
-
-  Closeable getResourceCloser()
-  {
-    return resourceCloser;
-  }
-
-  FetchedFile<T> cache()
-  {
-    return new FetchedFile<>(object, file, () -> {});
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/Fetcher.java b/api/src/main/java/io/druid/data/input/impl/prefetch/Fetcher.java
deleted file mode 100644
index f645087e4e0..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/Fetcher.java
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import io.druid.java.util.common.ISE;
-import io.druid.java.util.common.logger.Logger;
-
-import javax.annotation.Nullable;
-import java.io.Closeable;
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicLong;
-
-/**
- * A file fetcher used by {@link PrefetchableTextFilesFirehoseFactory} and {@link PrefetchSqlFirehoseFactory}.
- * See the javadoc of {@link PrefetchableTextFilesFirehoseFactory} for more details.
- */
-public abstract class Fetcher<T> implements Iterator<OpenedObject<T>>
-{
-  private static final Logger LOG = new Logger(Fetcher.class);
-  private static final String FETCH_FILE_PREFIX = "fetch-";
-  private final CacheManager<T> cacheManager;
-  private final List<T> objects;
-  private final ExecutorService fetchExecutor;
-
-  @Nullable
-  private final File temporaryDirectory;
-
-  private final boolean prefetchEnabled;
-
-  private final LinkedBlockingQueue<FetchedFile<T>> fetchedFiles = new LinkedBlockingQueue<>();
-
-  // Number of bytes of current fetched files.
-  // This is updated when a file is successfully fetched, a fetched file is deleted, or a fetched file is
-  // cached.
-  private final AtomicLong fetchedBytes = new AtomicLong(0);
-  private Future<Void> fetchFuture;
-  private PrefetchConfig prefetchConfig;
-
-  // nextFetchIndex indicates which object should be downloaded when fetch is triggered.
-  // This variable is always read by the same thread regardless of whether prefetch is enabled.
-  private int nextFetchIndex;
-
-  private int numRemainingObjects;
-
-  Fetcher(
-      CacheManager<T> cacheManager,
-      List<T> objects,
-      ExecutorService fetchExecutor,
-      @Nullable File temporaryDirectory,
-      PrefetchConfig prefetchConfig
-  )
-  {
-    this.cacheManager = cacheManager;
-    this.objects = objects;
-    this.fetchExecutor = fetchExecutor;
-    this.temporaryDirectory = temporaryDirectory;
-    this.prefetchConfig = prefetchConfig;
-    this.prefetchEnabled = prefetchConfig.getMaxFetchCapacityBytes() > 0;
-    this.numRemainingObjects = objects.size();
-
-    // (*) If the cache is initialized, put all cached files into the queue.
-    this.fetchedFiles.addAll(cacheManager.getFiles());
-    this.nextFetchIndex = fetchedFiles.size();
-    if (cacheManager.isEnabled() || prefetchEnabled) {
-      Preconditions.checkNotNull(temporaryDirectory, "temporaryDirectory");
-    }
-    if (prefetchEnabled) {
-      fetchIfNeeded(0L);
-    }
-  }
-
-  /**
-   * Submit a fetch task if remainingBytes is smaller than prefetchTriggerBytes.
-   */
-  private void fetchIfNeeded(long remainingBytes)
-  {
-    if ((fetchFuture == null || fetchFuture.isDone())
-        && remainingBytes <= prefetchConfig.getPrefetchTriggerBytes()) {
-      fetchFuture = fetchExecutor.submit(() -> {
-        fetch();
-        return null;
-      });
-    }
-  }
-
-  /**
-   * Fetch objects to a local disk up to {@link PrefetchConfig#maxFetchCapacityBytes}.
-   * This method is not thread safe and must be called by a single thread.  Note that even if
-   * {@link PrefetchConfig#maxFetchCapacityBytes} is 0, at least 1 file is always fetched.
-   * This is for simplifying design, and should be improved when our client implementations for cloud storages
-   * like S3 support range scan.
-   * <p>
-   * This method is called by {@link #fetchExecutor} if prefetch is enabled.  Otherwise, it is called by the same
-   * thread.
-   */
-  private void fetch() throws Exception
-  {
-    for (; nextFetchIndex < objects.size()
-           && fetchedBytes.get() <= prefetchConfig.getMaxFetchCapacityBytes(); nextFetchIndex++) {
-      final T object = objects.get(nextFetchIndex);
-      LOG.info("Fetching [%d]th object[%s], fetchedBytes[%d]", nextFetchIndex, object, fetchedBytes.get());
-      final File outFile = File.createTempFile(FETCH_FILE_PREFIX, null, temporaryDirectory);
-      fetchedBytes.addAndGet(download(object, outFile));
-      fetchedFiles.put(new FetchedFile<>(object, outFile, getFileCloser(outFile, fetchedBytes)));
-    }
-  }
-
-  /**
-   * Downloads an object into a file. The download process could be retried depending on the object source.
-   *
-   * @param object  an object to be downloaded
-   * @param outFile a file in which the object data is stored
-   *
-   * @return number of downloaded bytes
-   */
-  protected abstract long download(T object, File outFile) throws IOException;
-
-  /**
-   * Generates an instance of {@link OpenedObject} for the given object.
-   */
-  protected abstract OpenedObject<T> generateOpenObject(T object) throws IOException;
-
-
-  @Override
-  public boolean hasNext()
-  {
-    return numRemainingObjects > 0;
-  }
-
-  @Override
-  public OpenedObject<T> next()
-  {
-    if (!hasNext()) {
-      throw new NoSuchElementException();
-    }
-
-    // If fetch() fails, hasNext() still returns true and next() is still called. The below method checks whether
-    // fetch() threw an exception and propagates it if one exists.
-    checkFetchException(false);
-
-    try {
-      final OpenedObject<T> openedObject = prefetchEnabled ? openObjectFromLocal() : openObjectFromRemote();
-      numRemainingObjects--;
-      return openedObject;
-    }
-    catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private void checkFetchException(boolean wait)
-  {
-    try {
-      if (wait) {
-        fetchFuture.get(prefetchConfig.getFetchTimeout(), TimeUnit.MILLISECONDS);
-        fetchFuture = null;
-      } else if (fetchFuture != null && fetchFuture.isDone()) {
-        fetchFuture.get();
-        fetchFuture = null;
-      }
-    }
-    catch (InterruptedException | ExecutionException e) {
-      throw new RuntimeException(e);
-    }
-    catch (TimeoutException e) {
-      throw new ISE(e, "Failed to fetch, but cannot check the reason in [%d] ms", prefetchConfig.getFetchTimeout());
-    }
-  }
-
-  private OpenedObject<T> openObjectFromLocal() throws IOException
-  {
-    final FetchedFile<T> fetchedFile;
-
-    if (!fetchedFiles.isEmpty()) {
-      // If there are already fetched files, use them
-      fetchedFile = fetchedFiles.poll();
-    } else {
-      // Otherwise, wait for fetching
-      try {
-        fetchIfNeeded(fetchedBytes.get());
-        fetchedFile = fetchedFiles.poll(prefetchConfig.getFetchTimeout(), TimeUnit.MILLISECONDS);
-        if (fetchedFile == null) {
-          // Check whether the latest fetch failed
-          checkFetchException(true);
-          // Or throw a timeout exception
-          throw new RuntimeException(new TimeoutException());
-        }
-      }
-      catch (InterruptedException e) {
-        throw Throwables.propagate(e);
-      }
-    }
-    final FetchedFile<T> maybeCached = cacheIfPossible(fetchedFile);
-    // trigger fetch again for subsequent next() calls
-    fetchIfNeeded(fetchedBytes.get());
-    return new OpenedObject<>(maybeCached);
-  }
-
-  private OpenedObject<T> openObjectFromRemote() throws IOException
-  {
-    if (fetchedFiles.size() > 0) {
-      // If fetchedFiles is not empty even though prefetching is disabled, they should be cached files.
-      // We use them first. See (*).
-      return new OpenedObject<>(fetchedFiles.poll());
-    } else if (cacheManager.cacheable()) {
-      // If cache is enabled, first download an object to local storage and cache it.
-      try {
-        // Since maxFetchCapacityBytes is 0, at most one file is fetched.
-        fetch();
-        FetchedFile<T> fetchedFile = fetchedFiles.poll();
-        if (fetchedFile == null) {
-          throw new ISE("Cannot fetch object[%s]", objects.get(nextFetchIndex - 1));
-        }
-        final FetchedFile<T> cached = cacheIfPossible(fetchedFile);
-        return new OpenedObject<>(cached);
-      }
-      catch (Exception e) {
-        throw Throwables.propagate(e);
-      }
-    } else {
-      final T object = objects.get(nextFetchIndex);
-      LOG.info("Reading [%d]th object[%s]", nextFetchIndex, object);
-      nextFetchIndex++;
-      return generateOpenObject(object);
-    }
-  }
-
-  private FetchedFile<T> cacheIfPossible(FetchedFile<T> fetchedFile)
-  {
-    if (cacheManager.cacheable()) {
-      final FetchedFile<T> cachedFile = cacheManager.cache(fetchedFile);
-      // If the fetchedFile is cached, make a room for fetching more data immediately.
-      // This is because cache space and fetch space are separated.
-      fetchedBytes.addAndGet(-fetchedFile.length());
-      return cachedFile;
-    } else {
-      return fetchedFile;
-    }
-  }
-
-  private static Closeable getFileCloser(
-      final File file,
-      final AtomicLong fetchedBytes
-  )
-  {
-    return () -> {
-      final long fileSize = file.length();
-      file.delete();
-      fetchedBytes.addAndGet(-fileSize);
-    };
-  }
-}
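
A minimal concrete subclass sketch for the abstract class above, showing the two required hooks. It assumes same-package placement (the constructor and several collaborators such as CacheManager and OpenedObject are package-private) and a hypothetical URL-based object source:

    package org.apache.druid.data.input.impl.prefetch; // same-package access assumed

    import javax.annotation.Nullable;
    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import java.net.URL;
    import java.nio.file.Files;
    import java.nio.file.StandardCopyOption;
    import java.util.List;
    import java.util.concurrent.ExecutorService;

    class UrlFetcher extends Fetcher<URL>
    {
      UrlFetcher(
          CacheManager<URL> cacheManager,
          List<URL> urls,
          ExecutorService fetchExecutor,
          @Nullable File temporaryDirectory,
          PrefetchConfig prefetchConfig
      )
      {
        super(cacheManager, urls, fetchExecutor, temporaryDirectory, prefetchConfig);
      }

      @Override
      protected long download(URL url, File outFile) throws IOException
      {
        // Stream the object into the temp file and report the byte count.
        try (InputStream in = url.openStream()) {
          return Files.copy(in, outFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        }
      }

      @Override
      protected OpenedObject<URL> generateOpenObject(URL url) throws IOException
      {
        // Used when neither prefetching nor caching applies: open directly.
        return new OpenedObject<>(url, url.openStream(), () -> {});
      }
    }
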
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/FileFetcher.java b/api/src/main/java/io/druid/data/input/impl/prefetch/FileFetcher.java
deleted file mode 100644
index 7173fded854..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/FileFetcher.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.google.common.base.Predicate;
-
-import io.druid.java.util.common.RetryUtils;
-import io.druid.java.util.common.StringUtils;
-import org.apache.commons.io.IOUtils;
-
-import javax.annotation.Nullable;
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.List;
-
-import java.util.concurrent.ExecutorService;
-
-/**
- * A file fetcher used by {@link PrefetchableTextFilesFirehoseFactory}.
- * See the javadoc of {@link PrefetchableTextFilesFirehoseFactory} for more details.
- */
-public class FileFetcher<T> extends Fetcher<T>
-
-{
-  private static final int BUFFER_SIZE = 1024 * 4;
-  private final ObjectOpenFunction<T> openObjectFunction;
-  private final Predicate<Throwable> retryCondition;
-  private final byte[] buffer;
-  // maximum retry for fetching an object from the remote site
-  private final int maxFetchRetry;
-
-  public int getMaxFetchRetry()
-  {
-    return maxFetchRetry;
-  }
-
-  FileFetcher(
-      CacheManager<T> cacheManager,
-      List<T> objects,
-      ExecutorService fetchExecutor,
-      @Nullable File temporaryDirectory,
-      PrefetchConfig prefetchConfig,
-      ObjectOpenFunction<T> openObjectFunction,
-      Predicate<Throwable> retryCondition,
-      Integer maxFetchRetries
-  )
-  {
-
-    super(
-        cacheManager,
-        objects,
-        fetchExecutor,
-        temporaryDirectory,
-        prefetchConfig
-    );
-
-    this.openObjectFunction = openObjectFunction;
-    this.retryCondition = retryCondition;
-    this.buffer = new byte[BUFFER_SIZE];
-    this.maxFetchRetry = maxFetchRetries;
-  }
-
-  /**
-   * Downloads an object. It retries the download up to {@link #maxFetchRetry}
-   * times and throws an exception if every attempt fails.
-   *
-   * @param object  an object to be downloaded
-   * @param outFile a file in which the object data is stored
-   *
-   * @return number of downloaded bytes
-   */
-  @Override
-  protected long download(T object, File outFile) throws IOException
-  {
-    try {
-      return RetryUtils.retry(
-          () -> {
-            try (final InputStream is = openObjectFunction.open(object);
-                 final OutputStream os = new FileOutputStream(outFile)) {
-              return IOUtils.copyLarge(is, os, buffer);
-            }
-          },
-          retryCondition,
-          outFile::delete,
-          maxFetchRetry + 1,
-          StringUtils.format("Failed to download object[%s]", object)
-      );
-    }
-    catch (Exception e) {
-      throw new IOException(e);
-    }
-  }
-
-  /**
-   * Generates an instance of {@link OpenedObject} for which the underlying stream may be re-opened and retried
-   * based on the exception and retry condition.
-   */
-  @Override
-  protected OpenedObject<T> generateOpenObject(T object) throws IOException
-  {
-    return new OpenedObject<>(
-        object,
-        new RetryingInputStream<>(object, openObjectFunction, retryCondition, getMaxFetchRetry()),
-        getNoopCloser()
-    );
-  }
-
-  private static Closeable getNoopCloser()
-  {
-    return () -> {
-    };
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/JsonIterator.java b/api/src/main/java/io/druid/data/input/impl/prefetch/JsonIterator.java
deleted file mode 100644
index cf73abcb6be..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/JsonIterator.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package io.druid.data.input.impl.prefetch;
-
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonToken;
-import com.fasterxml.jackson.core.ObjectCodec;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import io.druid.java.util.common.IAE;
-import io.druid.java.util.common.guava.CloseQuietly;
-import io.druid.java.util.common.io.Closer;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-/**
- * An iterator over an array of JSON objects. Uses {@link ObjectCodec} to deserialize regular Java objects.
- *
- * @param <T> the type of object returned by this iterator
- */
-public class JsonIterator<T> implements Iterator<T>, Closeable
-{
-  private JsonParser jp;
-  private ObjectCodec objectCodec;
-  private final TypeReference typeRef;
-  private final InputStream inputStream;
-  private final Closeable resourceCloser;
-  private final ObjectMapper objectMapper;
-
-  /**
-   * @param typeRef        the object type that the JSON object should be deserialized into
-   * @param inputStream    stream containing an array of JSON objects
-   * @param resourceCloser a {@code Closeable} implementation to release resources that the object is holding
-   * @param objectMapper   object mapper, used for deserialization
-   */
-  public JsonIterator(
-      TypeReference<T> typeRef,
-      InputStream inputStream,
-      Closeable resourceCloser,
-      ObjectMapper objectMapper
-  )
-  {
-    this.typeRef = typeRef;
-    this.inputStream = inputStream;
-    this.resourceCloser = resourceCloser;
-    this.objectMapper = objectMapper;
-    init();
-  }
-
-  /**
-   * Returns {@code true} if there are more objects to be read.
-   *
-   * @return {@code true} if there are more objects to be read, otherwise {@code false}
-   */
-  @Override
-  public boolean hasNext()
-  {
-    if (jp.isClosed()) {
-      return false;
-    }
-    if (jp.getCurrentToken() == JsonToken.END_ARRAY) {
-      CloseQuietly.close(jp);
-      return false;
-    }
-    return true;
-  }
-
-  /**
-   * Retrieves the next deserialized object from the stream of JSON objects.
-   *
-   * @return the next deserialized object from the stream of JSON objects
-   */
-  @Override
-  public T next()
-  {
-    if (!hasNext()) {
-      throw new NoSuchElementException("No more objects to read!");
-    }
-    try {
-      final T retVal = objectCodec.readValue(jp, typeRef);
-      jp.nextToken();
-      return retVal;
-    }
-    catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private void init()
-  {
-    try {
-      if (inputStream == null) {
-        throw new UnsupportedOperationException();
-      } else {
-        jp = objectMapper.getFactory().createParser(inputStream);
-      }
-      final JsonToken nextToken = jp.nextToken();
-      if (nextToken != JsonToken.START_ARRAY) {
-        throw new IAE("First token should be START_ARRAY", jp.getCurrentToken());
-      } else {
-        jp.nextToken();
-        objectCodec = jp.getCodec();
-      }
-    }
-    catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void close() throws IOException
-  {
-    Closer closer = Closer.create();
-    if (jp != null) {
-      closer.register(jp);
-    }
-    closer.register(resourceCloser);
-    closer.close();
-  }
-}
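
A self-contained usage sketch for JsonIterator: stream the elements of a JSON array one at a time instead of materializing the whole array (post-rename package assumed):

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.druid.data.input.impl.prefetch.JsonIterator;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.Map;

    class JsonIteratorExample
    {
      public static void main(String[] args) throws IOException
      {
        InputStream in = new ByteArrayInputStream(
            "[{\"x\": 1}, {\"x\": 2}]".getBytes(StandardCharsets.UTF_8)
        );
        JsonIterator<Map<String, Object>> it = new JsonIterator<>(
            new TypeReference<Map<String, Object>>() {},
            in,
            () -> {},            // no extra resources to release
            new ObjectMapper()
        );
        try {
          while (it.hasNext()) {
            System.out.println(it.next().get("x")); // 1, then 2
          }
        }
        finally {
          it.close();
        }
      }
    }
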
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/ObjectOpenFunction.java b/api/src/main/java/io/druid/data/input/impl/prefetch/ObjectOpenFunction.java
deleted file mode 100644
index 43d47df013b..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/ObjectOpenFunction.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-
-interface ObjectOpenFunction<T>
-{
-  InputStream open(T object) throws IOException;
-
-  default InputStream open(T object, long start) throws IOException
-  {
-    return open(object);
-  }
-
-  default InputStream open(T object, File outFile) throws IOException
-  {
-    return open(object);
-  }
-
-}
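
Since open(T) is the only abstract method, the interface above is lambda-friendly. A one-line sketch (same-package placement assumed, as the interface is package-private):

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    class OpenerExample
    {
      // Same package as ObjectOpenFunction assumed.
      static InputStream openViaLambda(File file) throws IOException
      {
        ObjectOpenFunction<File> opener = FileInputStream::new;
        // The open(object, start) and open(object, outFile) defaults delegate here.
        return opener.open(file);
      }
    }
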
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/OpenedObject.java b/api/src/main/java/io/druid/data/input/impl/prefetch/OpenedObject.java
deleted file mode 100644
index 631d1712818..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/OpenedObject.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import org.apache.commons.io.FileUtils;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * A class containing meta information about an opened object.  This class is used to put related objects together.  It
- * contains an original object, an objectStream from the object, and a resourceCloser which knows how to release
- * associated resources on closing.
- *
- * {@link PrefetchableTextFilesFirehoseFactory.ResourceCloseableLineIterator} consumes the objectStream and closes
- * it with the resourceCloser.
- */
-class OpenedObject<T>
-{
-  // Original object
-  private final T object;
-  // Input stream from the object
-  private final InputStream objectStream;
-  // Closer which is called when the file is not needed anymore. Usually this deletes the file except for cached files.
-  private final Closeable resourceCloser;
-
-  OpenedObject(FetchedFile<T> fetchedFile) throws IOException
-  {
-    this(fetchedFile.getObject(), FileUtils.openInputStream(fetchedFile.getFile()), fetchedFile.getResourceCloser());
-  }
-
-  OpenedObject(T object, InputStream objectStream, Closeable resourceCloser)
-  {
-    this.object = object;
-    this.objectStream = objectStream;
-    this.resourceCloser = resourceCloser;
-  }
-
-  T getObject()
-  {
-    return object;
-  }
-
-  InputStream getObjectStream()
-  {
-    return objectStream;
-  }
-
-  Closeable getResourceCloser()
-  {
-    return resourceCloser;
-  }
-}
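
A rough construction sketch (names are illustrative): the resourceCloser is
whatever action frees the local copy once the stream has been consumed, and a
value-returning method reference such as File::delete satisfies Closeable, as
SqlFetcher further below does with outFile::delete:

    File tmpFile = File.createTempFile("example_", null);
    OpenedObject<URI> opened = new OpenedObject<>(
        sourceUri,                    // the original (hypothetical) input object
        new FileInputStream(tmpFile), // stream over the locally fetched copy
        tmpFile::delete               // resourceCloser: remove the copy on close
    );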
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchConfig.java b/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchConfig.java
deleted file mode 100644
index 8b6c8cb53da..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchConfig.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package io.druid.data.input.impl.prefetch;
-
-import java.util.concurrent.TimeUnit;
-
-/**
- * Holds the essential configuration required by {@link Fetcher} for prefetching purposes.
- */
-public class PrefetchConfig
-{
-  public static final long DEFAULT_MAX_CACHE_CAPACITY_BYTES = 1024 * 1024 * 1024; // 1GB
-  public static final long DEFAULT_MAX_FETCH_CAPACITY_BYTES = 1024 * 1024 * 1024; // 1GB
-  public static final long DEFAULT_FETCH_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(60);
-
-  // Roughly the maximum total size of fetched objects; the actual fetched size can be bigger. The reason is that our
-  // current client implementations for cloud storages like s3 don't support range scan yet, so we must download each
-  // file in full. It would still be possible to keep the size of cached/fetched data under these limits by estimating
-  // the after-fetch size, but then we would have to handle files that can never be fetched due to their large size,
-  // which would complicate the implementation.
-  private final long maxFetchCapacityBytes;
-
-  private final long maxCacheCapacityBytes;
-
-  private final long prefetchTriggerBytes;
-
-  // timeout for fetching an object from the remote site
-  private final long fetchTimeout;
-
-
-  public PrefetchConfig(
-      Long maxCacheCapacityBytes,
-      Long maxFetchCapacityBytes,
-      Long prefetchTriggerBytes,
-      Long fetchTimeout
-  )
-  {
-    this.maxCacheCapacityBytes = maxCacheCapacityBytes == null
-                                 ? DEFAULT_MAX_CACHE_CAPACITY_BYTES
-                                 : maxCacheCapacityBytes;
-    this.maxFetchCapacityBytes = maxFetchCapacityBytes == null
-                                 ? DEFAULT_MAX_FETCH_CAPACITY_BYTES
-                                 : maxFetchCapacityBytes;
-    this.prefetchTriggerBytes = prefetchTriggerBytes == null
-                                ? this.maxFetchCapacityBytes / 2
-                                : prefetchTriggerBytes;
-    this.fetchTimeout = fetchTimeout == null ? DEFAULT_FETCH_TIMEOUT_MS : fetchTimeout;
-  }
-
-  public long getMaxCacheCapacityBytes()
-  {
-    return maxCacheCapacityBytes;
-  }
-
-  public long getMaxFetchCapacityBytes()
-  {
-    return maxFetchCapacityBytes;
-  }
-
-  public long getPrefetchTriggerBytes()
-  {
-    return prefetchTriggerBytes;
-  }
-
-  public long getFetchTimeout()
-  {
-    return fetchTimeout;
-  }
-
-}
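
To make the null handling above concrete, a short sketch of how the defaults
resolve when every constructor argument is left null:

    PrefetchConfig config = new PrefetchConfig(null, null, null, null);
    config.getMaxCacheCapacityBytes(); // DEFAULT_MAX_CACHE_CAPACITY_BYTES, 1GB
    config.getMaxFetchCapacityBytes(); // DEFAULT_MAX_FETCH_CAPACITY_BYTES, 1GB
    config.getPrefetchTriggerBytes();  // maxFetchCapacityBytes / 2, here 512MB
    config.getFetchTimeout();          // DEFAULT_FETCH_TIMEOUT_MS, 60000 ms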
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchSqlFirehoseFactory.java b/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchSqlFirehoseFactory.java
deleted file mode 100644
index 31699617d98..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchSqlFirehoseFactory.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import io.druid.data.input.Firehose;
-import io.druid.data.input.FirehoseFactory;
-import io.druid.data.input.impl.InputRowParser;
-import io.druid.data.input.impl.SqlFirehose;
-import io.druid.java.util.common.ISE;
-import io.druid.java.util.common.concurrent.Execs;
-import io.druid.java.util.common.logger.Logger;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.io.LineIterator;
-
-import javax.annotation.Nullable;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeUnit;
-
-/**
- * PrefetchSqlFirehoseFactory is an abstract firehose factory for reading prefetched sql resultset data. Regardless
- * of whether prefetching is enabled, the entire result set of each sql object is fetched into a file on the local disk.
- * This class defines prefetching as caching the resultsets on local disk in case multiple sql queries are present.
- * When prefetching is enabled, the following functionalities are provided:
- * <p/>
- * <p>
- * - Caching: on the first call of {@link #connect(InputRowParser, File)}, it caches objects on a local disk
- * up to maxCacheCapacityBytes.  These caches are NOT deleted until the process terminates, and thus can be used for
- * future reads.
- * <br/>
- * - Fetching: once all cached data has been read, it fetches the remaining objects into a local disk and reads data from
- * them.  For performance reasons, a prefetch technique is used: when the size of the remaining fetched data is
- * smaller than {@link PrefetchConfig#prefetchTriggerBytes}, a background prefetch thread automatically starts to fetch the
- * remaining objects.
- * <br/>
- * <p/>
- * <p>
- * This implementation aims to avoid maintaining a persistent connection to the database by prefetching the resultset onto disk.
- * <br/>
- * Prefetching can be turned on/off by setting maxFetchCapacityBytes.  Depending on whether prefetching is enabled or
- * disabled, the firehose behaves as follows.
- * <p/>
- * <p>
- * 1. If prefetch is enabled, this firehose can fetch input objects in the background.
- * <br/>
- * 2. When next() is called, it first checks whether there are already fetched files in local storage.
- * <br/>
- * 2.1 If there are, it simply chooses a fetched file and returns a {@link JsonIterator} reading that file.
- * <br/>
- * 2.2 If there are no fetched files in local storage but some objects still remain to be read, the firehose
- * fetches one of the input objects in the background immediately. Finally, the firehose returns an iterator of {@link JsonIterator}
- * for deserializing the saved resultset.
- * <br/>
- * 3. If prefetch is disabled, the firehose saves the resultset to a file and returns an iterator of {@link JsonIterator}
- * which directly reads the stream opened by {@link #openObjectStream}. If there is an IOException, it will throw it
- * and the read will fail.
- */
-public abstract class PrefetchSqlFirehoseFactory<T>
-    implements FirehoseFactory<InputRowParser<Map<String, Object>>>
-{
-  private static final Logger LOG = new Logger(PrefetchSqlFirehoseFactory.class);
-
-  private final PrefetchConfig prefetchConfig;
-  private final CacheManager<T> cacheManager;
-  private List<T> objects;
-  private ObjectMapper objectMapper;
-
-
-  public PrefetchSqlFirehoseFactory(
-      Long maxCacheCapacityBytes,
-      Long maxFetchCapacityBytes,
-      Long prefetchTriggerBytes,
-      Long fetchTimeout,
-      ObjectMapper objectMapper
-  )
-  {
-    this.prefetchConfig = new PrefetchConfig(
-        maxCacheCapacityBytes,
-        maxFetchCapacityBytes,
-        prefetchTriggerBytes,
-        fetchTimeout
-    );
-    this.cacheManager = new CacheManager<>(
-        prefetchConfig.getMaxCacheCapacityBytes()
-    );
-    this.objectMapper = objectMapper;
-  }
-
-  @JsonProperty
-  public long getMaxCacheCapacityBytes()
-  {
-    return cacheManager.getMaxCacheCapacityBytes();
-  }
-
-  @JsonProperty
-  public long getMaxFetchCapacityBytes()
-  {
-    return prefetchConfig.getMaxFetchCapacityBytes();
-  }
-
-  @JsonProperty
-  public long getPrefetchTriggerBytes()
-  {
-    return prefetchConfig.getPrefetchTriggerBytes();
-  }
-
-  @JsonProperty
-  public long getFetchTimeout()
-  {
-    return prefetchConfig.getFetchTimeout();
-  }
-
-  @VisibleForTesting
-  CacheManager<T> getCacheManager()
-  {
-    return cacheManager;
-  }
-
-  @Override
-  public Firehose connect(InputRowParser<Map<String, Object>> firehoseParser, @Nullable File temporaryDirectory)
-      throws IOException
-  {
-    if (objects == null) {
-      objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));
-    }
-    if (cacheManager.isEnabled() || prefetchConfig.getMaxFetchCapacityBytes() > 0) {
-      Preconditions.checkNotNull(temporaryDirectory, "temporaryDirectory");
-      Preconditions.checkArgument(
-          temporaryDirectory.exists(),
-          "temporaryDirectory[%s] does not exist",
-          temporaryDirectory
-      );
-      Preconditions.checkArgument(
-          temporaryDirectory.isDirectory(),
-          "temporaryDirectory[%s] is not a directory",
-          temporaryDirectory
-      );
-    }
-
-    LOG.info("Create a new firehose for [%d] queries", objects.size());
-
-    // fetchExecutor is responsible for background data fetching
-    final ExecutorService fetchExecutor = Execs.singleThreaded("firehose_fetch_%d");
-    final Fetcher<T> fetcher = new SqlFetcher<>(
-        cacheManager,
-        objects,
-        fetchExecutor,
-        temporaryDirectory,
-        prefetchConfig,
-        new ObjectOpenFunction<T>()
-        {
-          @Override
-          public InputStream open(T object, File outFile) throws IOException
-          {
-            return openObjectStream(object, outFile);
-          }
-
-          @Override
-          public InputStream open(T object) throws IOException
-          {
-            final File outFile = File.createTempFile("sqlresults_", null, temporaryDirectory);
-            return openObjectStream(object, outFile);
-          }
-        }
-    );
-
-    return new SqlFirehose(
-        new Iterator<JsonIterator<Map<String, Object>>>()
-        {
-          @Override
-          public boolean hasNext()
-          {
-            return fetcher.hasNext();
-          }
-
-          @Override
-          public JsonIterator<Map<String, Object>> next()
-          {
-            if (!hasNext()) {
-              throw new NoSuchElementException();
-            }
-            try {
-              TypeReference<Map<String, Object>> type = new TypeReference<Map<String, Object>>()
-              {
-              };
-              final OpenedObject<T> openedObject = fetcher.next();
-              final InputStream stream = openedObject.getObjectStream();
-              return new JsonIterator<>(type, stream, openedObject.getResourceCloser(), objectMapper);
-            }
-            catch (Exception ioe) {
-              throw new RuntimeException(ioe);
-            }
-          }
-        },
-        firehoseParser,
-        () -> {
-          fetchExecutor.shutdownNow();
-          try {
-            Preconditions.checkState(fetchExecutor.awaitTermination(
-                prefetchConfig.getFetchTimeout(),
-                TimeUnit.MILLISECONDS
-            ));
-          }
-          catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-            throw new ISE("Failed to shutdown fetch executor during close");
-          }
-        }
-    );
-  }
-
-  /**
-   * Open an input stream from the given object.  The object is fetched into the file and an input
-   * stream to the file is provided.
-   *
-   * @param object   an object to be read
-   * @param filename the file into which the object is fetched
-   *
-   * @return an input stream to the file
-   */
-  protected abstract InputStream openObjectStream(T object, File filename) throws IOException;
-
-  protected abstract Collection<T> initObjects();
-}
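
A hypothetical subclass sketch illustrating the two abstract hooks; the class
name, the queries field, and runQueryToFile are illustrative and not from the
Druid codebase:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Collection;
    import java.util.List;

    class ExampleSqlFirehoseFactory extends PrefetchSqlFirehoseFactory<String>
    {
      private final List<String> queries;

      ExampleSqlFirehoseFactory(List<String> queries, ObjectMapper mapper)
      {
        super(null, null, null, null, mapper); // nulls fall back to the PrefetchConfig defaults
        this.queries = queries;
      }

      @Override
      protected Collection<String> initObjects()
      {
        return queries; // one "object" per SQL query
      }

      @Override
      protected InputStream openObjectStream(String query, File outFile) throws IOException
      {
        runQueryToFile(query, outFile); // assumed helper: run the query, write rows to outFile
        return new FileInputStream(outFile);
      }

      private void runQueryToFile(String query, File outFile) throws IOException
      {
        throw new UnsupportedOperationException("illustrative stub");
      }
    }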
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java b/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java
deleted file mode 100644
index 10406eea364..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java
+++ /dev/null
@@ -1,300 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
-import com.google.common.collect.ImmutableList;
-import io.druid.data.input.Firehose;
-import io.druid.data.input.impl.AbstractTextFilesFirehoseFactory;
-import io.druid.data.input.impl.FileIteratingFirehose;
-import io.druid.data.input.impl.StringInputRowParser;
-import io.druid.java.util.common.ISE;
-import io.druid.java.util.common.concurrent.Execs;
-import io.druid.java.util.common.logger.Logger;
-import org.apache.commons.io.LineIterator;
-
-import javax.annotation.Nullable;
-import java.io.Closeable;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.charset.StandardCharsets;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeUnit;
-
-/**
- * PrefetchableTextFilesFirehoseFactory is an abstract firehose factory for reading text files.  The firehose returned
- * by this class provides three key functionalities.
- * <p/>
- * <p>
- * - Caching: on the first call of {@link #connect(StringInputRowParser, File)}, it caches objects on a local disk
- * up to maxCacheCapacityBytes.  These caches are NOT deleted until the process terminates, and thus can be used for
- * future reads.
- * <br/>
- * - Fetching: once all cached data has been read, it fetches the remaining objects into a local disk and reads data from
- * them.  For performance reasons, a prefetch technique is used: when the size of the remaining fetched data is
- * smaller than {@link PrefetchConfig#prefetchTriggerBytes}, a background prefetch thread automatically starts to fetch the
- * remaining objects.
- * <br/>
- * - Retry: if an exception occurs while downloading an object, it retries up to {@link #maxFetchRetry} times.
- * <p/>
- * <p>
- * This implementation can be useful when reading input objects is expensive, as when reading from AWS S3, because
- * batch tasks like IndexTask or HadoopIndexTask can read the whole data set twice: once for determining partition specs
- * and once for generating segments, if the intervals of GranularitySpec are not specified.
- * <br/>
- * Prefetching can be turned on/off by setting maxFetchCapacityBytes.  Depending on whether prefetching is enabled or
- * disabled, the firehose behaves as follows.
- * <p/>
- * <p>
- * 1. If prefetch is enabled, this firehose can fetch input objects in the background.
- * <br/>
- * 2. When next() is called, it first checks whether there are already fetched files in local storage.
- * <br/>
- * 2.1 If there are, it simply chooses a fetched file and returns a {@link LineIterator} reading that file.
- * <br/>
- * 2.2 If there are no fetched files in local storage but some objects still remain to be read, the firehose
- * fetches one of the input objects in the background immediately. If an IOException occurs while downloading the object,
- * it retries up to the maximum retry count. Finally, the firehose returns a {@link LineIterator} only when the
- * download operation has successfully finished.
- * <br/>
- * 3. If prefetch is disabled, the firehose returns a {@link LineIterator} which directly reads the stream opened by
- * {@link #openObjectStream}. If there is an IOException, it will throw it and the read will fail.
- */
-public abstract class PrefetchableTextFilesFirehoseFactory<T>
-    extends AbstractTextFilesFirehoseFactory<T>
-{
-  private static final Logger LOG = new Logger(PrefetchableTextFilesFirehoseFactory.class);
-
-  public static final int DEFAULT_MAX_FETCH_RETRY = 3;
-
-  private final CacheManager<T> cacheManager;
-  private final PrefetchConfig prefetchConfig;
-
-  private List<T> objects;
-  private final int maxFetchRetry;
-
-  public PrefetchableTextFilesFirehoseFactory(
-      Long maxCacheCapacityBytes,
-      Long maxFetchCapacityBytes,
-      Long prefetchTriggerBytes,
-      Long fetchTimeout,
-      Integer maxFetchRetry
-  )
-  {
-    this.prefetchConfig = new PrefetchConfig(
-        maxCacheCapacityBytes,
-        maxFetchCapacityBytes,
-        prefetchTriggerBytes,
-        fetchTimeout
-    );
-    this.cacheManager = new CacheManager<>(
-        prefetchConfig.getMaxCacheCapacityBytes()
-    );
-    this.maxFetchRetry = maxFetchRetry == null ? DEFAULT_MAX_FETCH_RETRY : maxFetchRetry;
-  }
-
-  @JsonProperty
-  public long getMaxCacheCapacityBytes()
-  {
-    return cacheManager.getMaxCacheCapacityBytes();
-  }
-
-  @JsonProperty
-  public long getMaxFetchCapacityBytes()
-  {
-    return prefetchConfig.getMaxFetchCapacityBytes();
-  }
-
-  @JsonProperty
-  public long getPrefetchTriggerBytes()
-  {
-    return prefetchConfig.getPrefetchTriggerBytes();
-  }
-
-  @JsonProperty
-  public long getFetchTimeout()
-  {
-    return prefetchConfig.getFetchTimeout();
-  }
-
-  @JsonProperty
-  public int getMaxFetchRetry()
-  {
-    return maxFetchRetry;
-  }
-
-  @VisibleForTesting
-  CacheManager<T> getCacheManager()
-  {
-    return cacheManager;
-  }
-
-  @Override
-  public Firehose connect(StringInputRowParser firehoseParser, @Nullable File temporaryDirectory) throws IOException
-  {
-    if (objects == null) {
-      objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));
-    }
-
-    if (cacheManager.isEnabled() || prefetchConfig.getMaxFetchCapacityBytes() > 0) {
-      Preconditions.checkNotNull(temporaryDirectory, "temporaryDirectory");
-      Preconditions.checkArgument(
-          temporaryDirectory.exists(),
-          "temporaryDirectory[%s] does not exist",
-          temporaryDirectory
-      );
-      Preconditions.checkArgument(
-          temporaryDirectory.isDirectory(),
-          "temporaryDirectory[%s] is not a directory",
-          temporaryDirectory
-      );
-    }
-
-    LOG.info("Create a new firehose for [%d] objects", objects.size());
-
-    // fetchExecutor is responsible for background data fetching
-    final ExecutorService fetchExecutor = Execs.singleThreaded("firehose_fetch_%d");
-    final FileFetcher<T> fetcher = new FileFetcher<T>(
-        cacheManager,
-        objects,
-        fetchExecutor,
-        temporaryDirectory,
-        prefetchConfig,
-        new ObjectOpenFunction<T>()
-        {
-          @Override
-          public InputStream open(T object) throws IOException
-          {
-            return openObjectStream(object);
-          }
-
-          @Override
-          public InputStream open(T object, long start) throws IOException
-          {
-            return openObjectStream(object, start);
-          }
-        },
-        getRetryCondition(),
-        getMaxFetchRetry()
-    );
-
-    return new FileIteratingFirehose(
-        new Iterator<LineIterator>()
-        {
-          @Override
-          public boolean hasNext()
-          {
-            return fetcher.hasNext();
-          }
-
-          @Override
-          public LineIterator next()
-          {
-            if (!hasNext()) {
-              throw new NoSuchElementException();
-            }
-
-            final OpenedObject<T> openedObject = fetcher.next();
-            final InputStream stream;
-            try {
-              stream = wrapObjectStream(
-                  openedObject.getObject(),
-                  openedObject.getObjectStream()
-              );
-            }
-            catch (IOException e) {
-              throw new RuntimeException(e);
-            }
-
-            return new ResourceCloseableLineIterator(
-                new InputStreamReader(stream, StandardCharsets.UTF_8),
-                openedObject.getResourceCloser()
-            );
-          }
-        },
-        firehoseParser,
-        () -> {
-          fetchExecutor.shutdownNow();
-          try {
-            Preconditions.checkState(fetchExecutor.awaitTermination(
-                prefetchConfig.getFetchTimeout(),
-                TimeUnit.MILLISECONDS
-            ));
-          }
-          catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-            throw new ISE("Failed to shutdown fetch executor during close");
-          }
-        }
-    );
-  }
-
-  /**
-   * Returns a predicate describing retry conditions. {@link Fetcher} and {@link RetryingInputStream} will retry on the
-   * errors satisfying this condition.
-   */
-  protected abstract Predicate<Throwable> getRetryCondition();
-
-  /**
-   * Open an input stream from the given object.  If the object is compressed, this method should return the raw,
-   * still-compressed byte stream.  Decompression should be handled in {@link #wrapObjectStream(Object, InputStream)}.
-   *
-   * @param object an object to be read
-   * @param start  start offset
-   *
-   * @return an input stream for the object
-   */
-  protected abstract InputStream openObjectStream(T object, long start) throws IOException;
-
-  /**
-   * This class calls the {@link Closeable#close()} method of the resourceCloser when it is closed.
-   */
-  static class ResourceCloseableLineIterator extends LineIterator
-  {
-    private final Closeable resourceCloser;
-
-    ResourceCloseableLineIterator(Reader reader, Closeable resourceCloser) throws IllegalArgumentException
-    {
-      super(reader);
-      this.resourceCloser = resourceCloser;
-    }
-
-    @Override
-    public void close()
-    {
-      super.close();
-      try {
-        resourceCloser.close();
-      }
-      catch (IOException e) {
-        throw new RuntimeException(e);
-      }
-    }
-  }
-}
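
The subclass-supplied pieces can be sketched as follows (the URI object type
and the openRangeRequest helper are illustrative; initObjects,
openObjectStream(T), and wrapObjectStream come from
AbstractTextFilesFirehoseFactory and are assumed to be implemented as well):

    @Override
    protected Predicate<Throwable> getRetryCondition()
    {
      // retry transient I/O errors; connection resets are already handled
      // by RetryingInputStream itself
      return t -> t instanceof IOException;
    }

    @Override
    protected InputStream openObjectStream(URI object, long start) throws IOException
    {
      // hypothetical helper: open the remote object from the given byte offset,
      // e.g. via an HTTP Range request
      return openRangeRequest(object, start);
    }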
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/RetryingInputStream.java b/api/src/main/java/io/druid/data/input/impl/prefetch/RetryingInputStream.java
deleted file mode 100644
index 31f3611d7bf..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/RetryingInputStream.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.google.common.base.Predicate;
-import com.google.common.base.Throwables;
-import com.google.common.io.CountingInputStream;
-import io.druid.java.util.common.RetryUtils;
-import io.druid.java.util.common.logger.Logger;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.SocketException;
-
-/**
- * This class is used by {@link Fetcher} when prefetch is disabled. It is responsible for re-opening the underlying
- * input stream for the input object when the socket connection is reset, as well as on errors matching the given {@link #retryCondition}.
- *
- * @param <T> object type
- */
-class RetryingInputStream<T> extends InputStream
-{
-  private static final Logger log = new Logger(RetryingInputStream.class);
-
-  private final T object;
-  private final ObjectOpenFunction<T> objectOpenFunction;
-  private final Predicate<Throwable> retryCondition;
-  private final int maxRetry;
-
-  private CountingInputStream delegate;
-  private long startOffset;
-
-  RetryingInputStream(
-      T object,
-      ObjectOpenFunction<T> objectOpenFunction,
-      Predicate<Throwable> retryCondition,
-      int maxRetry
-  ) throws IOException
-  {
-    this.object = object;
-    this.objectOpenFunction = objectOpenFunction;
-    this.retryCondition = retryCondition;
-    this.maxRetry = maxRetry;
-    this.delegate = new CountingInputStream(objectOpenFunction.open(object));
-  }
-
-  private boolean isConnectionReset(Throwable t)
-  {
-    return (t instanceof SocketException && (t.getMessage() != null && t.getMessage().contains("Connection reset"))) ||
-           (t.getCause() != null && isConnectionReset(t.getCause()));
-  }
-
-  private void waitOrThrow(Throwable t, int nTry) throws IOException
-  {
-    final boolean isConnectionReset = isConnectionReset(t);
-    if (isConnectionReset || retryCondition.apply(t)) {
-      if (isConnectionReset) {
-        // Re-open the input stream on connection reset
-        startOffset += delegate.getCount();
-        try {
-          delegate.close();
-        }
-        catch (IOException e) {
-          // ignore this exception
-          log.warn(e, "Error while closing the delegate input stream");
-        }
-      }
-      try {
-        // Wait for the next try
-        RetryUtils.awaitNextRetry(t, null, nTry + 1, maxRetry, false);
-
-        if (isConnectionReset) {
-          log.info("retrying from offset[%d]", startOffset);
-          delegate = new CountingInputStream(objectOpenFunction.open(object, startOffset));
-        }
-      }
-      catch (InterruptedException | IOException e) {
-        t.addSuppressed(e);
-        throwAsIOException(t);
-      }
-    } else {
-      throwAsIOException(t);
-    }
-  }
-
-  private static void throwAsIOException(Throwable t) throws IOException
-  {
-    Throwables.propagateIfInstanceOf(t, IOException.class);
-    throw new IOException(t);
-  }
-
-  @Override
-  public int read() throws IOException
-  {
-    for (int nTry = 0; nTry < maxRetry; nTry++) {
-      try {
-        return delegate.read();
-      }
-      catch (Throwable t) {
-        waitOrThrow(t, nTry);
-      }
-    }
-    return delegate.read();
-  }
-
-  @Override
-  public int read(byte b[]) throws IOException
-  {
-    for (int nTry = 0; nTry < maxRetry; nTry++) {
-      try {
-        return delegate.read(b);
-      }
-      catch (Throwable t) {
-        waitOrThrow(t, nTry);
-      }
-    }
-    return delegate.read(b);
-  }
-
-  @Override
-  public int read(byte b[], int off, int len) throws IOException
-  {
-    for (int nTry = 0; nTry < maxRetry; nTry++) {
-      try {
-        return delegate.read(b, off, len);
-      }
-      catch (Throwable t) {
-        waitOrThrow(t, nTry);
-      }
-    }
-    return delegate.read(b, off, len);
-  }
-
-  @Override
-  public long skip(long n) throws IOException
-  {
-    for (int nTry = 0; nTry < maxRetry; nTry++) {
-      try {
-        return delegate.skip(n);
-      }
-      catch (Throwable t) {
-        waitOrThrow(t, nTry);
-      }
-    }
-    return delegate.skip(n);
-  }
-
-  @Override
-  public int available() throws IOException
-  {
-    for (int nTry = 0; nTry < maxRetry; nTry++) {
-      try {
-        return delegate.available();
-      }
-      catch (Throwable t) {
-        waitOrThrow(t, nTry);
-      }
-    }
-    return delegate.available();
-  }
-
-  @Override
-  public void close() throws IOException
-  {
-    delegate.close();
-  }
-}
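
A brief construction sketch (the class is package-private, so this assumes code
in the same package; openFunction is an ObjectOpenFunction like the one
sketched earlier). On a connection reset, the CountingInputStream delegate
records how many bytes were already consumed, so the stream is re-opened at
that offset rather than from zero:

    InputStream in = new RetryingInputStream<>(
        object,                                   // the input object of type T
        openFunction,                             // knows how to (re)open it, optionally at an offset
        t -> t instanceof SocketTimeoutException, // illustrative extra retry condition
        3                                         // maxRetry
    );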
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/SqlFetcher.java b/api/src/main/java/io/druid/data/input/impl/prefetch/SqlFetcher.java
deleted file mode 100644
index 9ac098e9790..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/SqlFetcher.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import javax.annotation.Nullable;
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-
-
-/**
- * A file fetcher used by {@link PrefetchSqlFirehoseFactory}.
- * See the javadoc of {@link PrefetchSqlFirehoseFactory} for more details.
- */
-public class SqlFetcher<T> extends Fetcher<T>
-{
-  private static final String FETCH_FILE_PREFIX = "sqlfetch-";
-
-  @Nullable
-  private final File temporaryDirectory;
-
-  private final ObjectOpenFunction<T> openObjectFunction;
-
-  SqlFetcher(
-      CacheManager<T> cacheManager,
-      List<T> objects,
-      ExecutorService fetchExecutor,
-      @Nullable File temporaryDirectory,
-      PrefetchConfig prefetchConfig,
-      ObjectOpenFunction<T> openObjectFunction
-  )
-  {
-
-    super(
-        cacheManager,
-        objects,
-        fetchExecutor,
-        temporaryDirectory,
-        prefetchConfig
-    );
-    this.temporaryDirectory = temporaryDirectory;
-    this.openObjectFunction = openObjectFunction;
-  }
-
-  /**
-   * Downloads the entire resultset object into a file. This avoids maintaining a
-   * persistent connection to the database. The retry is performed at the query execution layer.
-   *
-   * @param object  sql query for which the resultset is to be downloaded
-   * @param outFile the file in which the object data is stored
-   *
-   * @return size of downloaded resultset
-   */
-
-  @Override
-  protected long download(T object, File outFile) throws IOException
-  {
-    openObjectFunction.open(object, outFile);
-    return outFile.length();
-  }
-
-  /**
-   * Generates an instance of {@link OpenedObject} for the given object. This is usually called
-   * when prefetching is disabled. The retry is performed at the query execution layer.
-   */
-
-  @Override
-  protected OpenedObject<T> generateOpenObject(T object) throws IOException
-  {
-    final File outFile = File.createTempFile(FETCH_FILE_PREFIX, null, temporaryDirectory);
-    return new OpenedObject<>(
-        object,
-        openObjectFunction.open(object, outFile),
-        outFile::delete
-    );
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/Binders.java b/api/src/main/java/io/druid/guice/Binders.java
deleted file mode 100644
index 3795b560ef6..00000000000
--- a/api/src/main/java/io/druid/guice/Binders.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Binder;
-import com.google.inject.Key;
-import com.google.inject.multibindings.MapBinder;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.segment.loading.DataSegmentArchiver;
-import io.druid.segment.loading.DataSegmentFinder;
-import io.druid.segment.loading.DataSegmentKiller;
-import io.druid.segment.loading.DataSegmentMover;
-import io.druid.segment.loading.DataSegmentPusher;
-import io.druid.tasklogs.TaskLogs;
-
-/**
- */
-@PublicApi
-public class Binders
-{
-
-  public static MapBinder<String, DataSegmentKiller> dataSegmentKillerBinder(Binder binder)
-  {
-    return MapBinder.newMapBinder(binder, String.class, DataSegmentKiller.class);
-  }
-
-  public static MapBinder<String, DataSegmentMover> dataSegmentMoverBinder(Binder binder)
-  {
-    return MapBinder.newMapBinder(binder, String.class, DataSegmentMover.class);
-  }
-
-  public static MapBinder<String, DataSegmentArchiver> dataSegmentArchiverBinder(Binder binder)
-  {
-    return MapBinder.newMapBinder(binder, String.class, DataSegmentArchiver.class);
-  }
-
-  public static MapBinder<String, DataSegmentPusher> dataSegmentPusherBinder(Binder binder)
-  {
-    return PolyBind.optionBinder(binder, Key.get(DataSegmentPusher.class));
-  }
-
-  public static MapBinder<String, DataSegmentFinder> dataSegmentFinderBinder(Binder binder)
-  {
-    return PolyBind.optionBinder(binder, Key.get(DataSegmentFinder.class));
-  }
-
-  public static MapBinder<String, TaskLogs> taskLogsBinder(Binder binder)
-  {
-    return PolyBind.optionBinder(binder, Key.get(TaskLogs.class));
-  }
-}
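
A usage sketch from a Guice module; the module and implementation class names
are hypothetical:

    import com.google.inject.Binder;
    import com.google.inject.Module;

    public class ExampleStorageModule implements Module
    {
      @Override
      public void configure(Binder binder)
      {
        // register a segment killer under the "example" key; entries in this
        // MapBinder-built map are looked up by key at runtime
        Binders.dataSegmentKillerBinder(binder)
               .addBinding("example")
               .to(ExampleDataSegmentKiller.class);
      }
    }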
diff --git a/api/src/main/java/io/druid/guice/ConditionalMultibind.java b/api/src/main/java/io/druid/guice/ConditionalMultibind.java
deleted file mode 100644
index 6ac9a09d040..00000000000
--- a/api/src/main/java/io/druid/guice/ConditionalMultibind.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Predicate;
-import com.google.inject.Binder;
-import com.google.inject.TypeLiteral;
-import com.google.inject.multibindings.Multibinder;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.Annotation;
-import java.util.Properties;
-
-/**
- * Provides the ability to conditionally bind an item to a set. The condition is based on a property value set in
- * runtime.properties.
- *
- * Usage example:
- *
- * ConditionalMultibind.create(props, binder, Animal.class)
- *                     .addConditionBinding("animal.type", Predicates.equalTo("cat"), Cat.class)
- *                     .addConditionBinding("animal.type", Predicates.equalTo("dog"), Dog.class);
- *
- * At binding time, this will check the value set for property "animal.type" in props. If the value is "cat", it will
- * add a binding to Cat.class. If the value is "dog", it will add a binding to Dog.class.
- *
- * At injection time, you will get the items that satisfy their corresponding predicates by calling
- * injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>(){}))
- */
-@PublicApi
-public class ConditionalMultibind<T>
-{
-
-  /**
-   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
-   *
-   * @param properties the runtime properties.
-   * @param binder     the binder for the injector that is being configured.
-   * @param type       the type that will be injected.
-   * @param <T>        interface type.
-   *
-   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
-   */
-  public static <T> ConditionalMultibind<T> create(Properties properties, Binder binder, Class<T> type)
-  {
-    return new ConditionalMultibind<T>(properties, Multibinder.newSetBinder(binder, type));
-  }
-
-  /**
-   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
-   *
-   * @param properties     the runtime properties.
-   * @param binder         the binder for the injector that is being configured.
-   * @param type           the type that will be injected.
-   * @param <T>            interface type.
-   * @param annotationType the binding annotation.
-   *
-   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
-   */
-  public static <T> ConditionalMultibind<T> create(
-      Properties properties,
-      Binder binder,
-      Class<T> type,
-      Class<? extends Annotation> annotationType
-  )
-  {
-    return new ConditionalMultibind<T>(properties, Multibinder.newSetBinder(binder, type, annotationType));
-  }
-
-  /**
-   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
-   *
-   * @param properties the runtime properties.
-   * @param binder     the binder for the injector that is being configured.
-   * @param type       the type that will be injected.
-   * @param <T>        interface type.
-   *
-   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
-   */
-  public static <T> ConditionalMultibind<T> create(Properties properties, Binder binder, TypeLiteral<T> type)
-  {
-    return new ConditionalMultibind<T>(properties, Multibinder.newSetBinder(binder, type));
-  }
-
-  /**
-   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
-   *
-   * @param properties     the runtime properties.
-   * @param binder         the binder for the injector that is being configured.
-   * @param type           the type that will be injected.
-   * @param <T>            interface type.
-   * @param annotationType the binding annotation.
-   *
-   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
-   */
-  public static <T> ConditionalMultibind<T> create(
-      Properties properties,
-      Binder binder,
-      TypeLiteral<T> type,
-      Class<? extends Annotation> annotationType
-  )
-  {
-    return new ConditionalMultibind<T>(properties, Multibinder.newSetBinder(binder, type, annotationType));
-  }
-
-
-  private final Properties properties;
-  private final Multibinder<T> multibinder;
-
-  public ConditionalMultibind(Properties properties, Multibinder<T> multibinder)
-  {
-    this.properties = properties;
-    this.multibinder = multibinder;
-  }
-
-  /**
-   * Unconditionally bind target to the set.
-   *
-   * @param target the target class to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addBinding(Class<? extends T> target)
-  {
-    multibinder.addBinding().to(target);
-    return this;
-  }
-
-  /**
-   * Unconditionally bind target to the set.
-   *
-   * @param target the target instance to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addBinding(T target)
-  {
-    multibinder.addBinding().toInstance(target);
-    return this;
-  }
-
-  /**
-   * Unconditionally bind target to the set.
-   *
-   * @param target the target type to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addBinding(TypeLiteral<T> target)
-  {
-    multibinder.addBinding().to(target);
-    return this;
-  }
-
-  /**
-   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
-   *
-   * @param property  the property to inspect
-   * @param condition the predicate used to verify whether to add a binding to "target"
-   * @param target    the target class to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addConditionBinding(
-      String property,
-      Predicate<String> condition,
-      Class<? extends T> target
-  )
-  {
-    final String value = properties.getProperty(property);
-    if (value == null) {
-      return this;
-    }
-    if (condition.apply(value)) {
-      multibinder.addBinding().to(target);
-    }
-    return this;
-  }
-
-  /**
-   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
-   *
-   * @param property  the property to inspect
-   * @param condition the predicate used to verify whether to add a binding to "target"
-   * @param target    the target instance to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addConditionBinding(
-      String property,
-      Predicate<String> condition,
-      T target
-  )
-  {
-    final String value = properties.getProperty(property);
-    if (value == null) {
-      return this;
-    }
-    if (condition.apply(value)) {
-      multibinder.addBinding().toInstance(target);
-    }
-    return this;
-  }
-
-  /**
-   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
-   *
-   * @param property  the property to inspect
-   * @param condition the predicate used to verify whether to add a binding to "target"
-   * @param target    the target type to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  @PublicApi
-  public ConditionalMultibind<T> addConditionBinding(
-      String property,
-      Predicate<String> condition,
-      TypeLiteral<T> target
-  )
-  {
-    final String value = properties.getProperty(property);
-    if (value == null) {
-      return this;
-    }
-    if (condition.apply(value)) {
-      multibinder.addBinding().to(target);
-    }
-    return this;
-  }
-}
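
Continuing the Animal/Cat/Dog example from the javadoc above, a sketch of
wiring and consuming the set (assumes those classes exist, with Cat and Dog
injectable implementations of Animal):

    Properties props = new Properties();
    props.setProperty("animal.type", "cat");

    Injector injector = Guice.createInjector(
        binder -> ConditionalMultibind.create(props, binder, Animal.class)
            .addConditionBinding("animal.type", Predicates.equalTo("cat"), Cat.class)
            .addConditionBinding("animal.type", Predicates.equalTo("dog"), Dog.class)
    );

    // only the "cat" predicate matched, so the injected set holds a single Cat
    Set<Animal> animals = injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>() {}));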
diff --git a/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java b/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java
deleted file mode 100644
index a4f75b966ae..00000000000
--- a/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Binder;
-import com.google.inject.Module;
-import io.druid.guice.annotations.PublicApi;
-
-/**
- */
-@PublicApi
-public class DruidGuiceExtensions implements Module
-{
-  @Override
-  public void configure(Binder binder)
-  {
-    binder.bindScope(LazySingleton.class, DruidScopes.SINGLETON);
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/DruidScopes.java b/api/src/main/java/io/druid/guice/DruidScopes.java
deleted file mode 100644
index b815109208d..00000000000
--- a/api/src/main/java/io/druid/guice/DruidScopes.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Key;
-import com.google.inject.Provider;
-import com.google.inject.Scope;
-import com.google.inject.Scopes;
-import io.druid.guice.annotations.PublicApi;
-
-/**
- */
-@PublicApi
-public class DruidScopes
-{
-  public static final Scope SINGLETON = new Scope()
-  {
-    @Override
-    public <T> Provider<T> scope(Key<T> key, Provider<T> unscoped)
-    {
-      return Scopes.SINGLETON.scope(key, unscoped);
-    }
-
-    @Override
-    public String toString()
-    {
-      return "DruidScopes.SINGLETON";
-    }
-  };
-}
diff --git a/api/src/main/java/io/druid/guice/Jerseys.java b/api/src/main/java/io/druid/guice/Jerseys.java
deleted file mode 100644
index f6b7f138b75..00000000000
--- a/api/src/main/java/io/druid/guice/Jerseys.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Binder;
-import com.google.inject.TypeLiteral;
-import com.google.inject.multibindings.Multibinder;
-import io.druid.guice.annotations.JSR311Resource;
-import io.druid.guice.annotations.PublicApi;
-
-/**
- */
-@PublicApi
-public class Jerseys
-{
-  public static void addResource(Binder binder, Class<?> resourceClazz)
-  {
-    Multibinder.newSetBinder(binder, new TypeLiteral<Class<?>>(){}, JSR311Resource.class)
-               .addBinding()
-               .toInstance(resourceClazz);
-  }
-}
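
Usage is a one-liner inside a module's configure(Binder); the resource class
name here is hypothetical:

    // adds the class to the set of @JSR311Resource types that the server
    // module consults when registering JAX-RS endpoints
    Jerseys.addResource(binder, MyStatusResource.class);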
diff --git a/api/src/main/java/io/druid/guice/JsonConfigProvider.java b/api/src/main/java/io/druid/guice/JsonConfigProvider.java
deleted file mode 100644
index b1b94e6cfd2..00000000000
--- a/api/src/main/java/io/druid/guice/JsonConfigProvider.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Supplier;
-import com.google.common.base.Suppliers;
-import com.google.inject.Binder;
-import com.google.inject.Inject;
-import com.google.inject.Key;
-import com.google.inject.Provider;
-import com.google.inject.util.Types;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.Annotation;
-import java.lang.reflect.ParameterizedType;
-import java.util.Properties;
-
-
-/**
- * Provides a singleton value of type {@code <T>} from {@code Properties} bound in guice.
- * <br/>
- * <h3>Usage</h3>
- * To install this provider, bind it in your guice module, like below.
- *
- * <pre>
- * JsonConfigProvider.bind(binder, "druid.server", DruidServerConfig.class);
- * </pre>
- * <br/>
- * In the above case, {@code druid.server} should be a key found in the {@code Properties} bound elsewhere.
- * The value of that key should directly relate to the fields in {@code DruidServerConfig.class}.
- *
- * <h3>Implementation</h3>
- * <br/>
- * The state of {@code <T>} is defined by the value of the property {@code propertyBase}.
- * This value is a json structure, decoded via {@link JsonConfigurator#configurate(Properties, String, Class)}.
- * <br/>
- *
- * An example might be if DruidServerConfig.class were
- *
- * <pre>
- *   public class DruidServerConfig
- *   {
- *     @JsonProperty @NotNull public String hostname = null;
- *     @JsonProperty @Min(1025) public int port = 8080;
- *   }
- * </pre>
- *
- * And your Properties object had in it
- *
- * <pre>
- *   druid.server.hostname=0.0.0.0
- *   druid.server.port=3333
- * </pre>
- *
- * Then this would bind a singleton instance of a DruidServerConfig object with hostname = "0.0.0.0" and port = 3333.
- *
- * If the port were not set in the properties, the default of 8080 would be used.  Essentially, this is the same as
- * stripping the "druid.server" prefix from the properties, building a Map, and passing it into
- * ObjectMapper.convertValue().
- *
- * @param <T> type of config object to provide.
- */
-@PublicApi
-public class JsonConfigProvider<T> implements Provider<Supplier<T>>
-{
-  @SuppressWarnings("unchecked")
-  public static <T> void bind(Binder binder, String propertyBase, Class<T> classToProvide)
-  {
-    bind(
-        binder,
-        propertyBase,
-        classToProvide,
-        Key.get(classToProvide),
-        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide))
-    );
-  }
-
-  @SuppressWarnings("unchecked")
-  public static <T> void bind(Binder binder, String propertyBase, Class<T> classToProvide, Annotation annotation)
-  {
-    bind(
-        binder,
-        propertyBase,
-        classToProvide,
-        Key.get(classToProvide, annotation),
-        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide), annotation)
-    );
-  }
-
-  @SuppressWarnings("unchecked")
-  public static <T> void bind(
-      Binder binder,
-      String propertyBase,
-      Class<T> classToProvide,
-      Class<? extends Annotation> annotation
-  )
-  {
-    bind(
-        binder,
-        propertyBase,
-        classToProvide,
-        Key.get(classToProvide, annotation),
-        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide), annotation)
-    );
-  }
-
-  @SuppressWarnings("unchecked")
-  public static <T> void bind(
-      Binder binder,
-      String propertyBase,
-      Class<T> clazz,
-      Key<T> instanceKey,
-      Key<Supplier<T>> supplierKey
-  )
-  {
-    binder.bind(supplierKey).toProvider(of(propertyBase, clazz)).in(LazySingleton.class);
-    binder.bind(instanceKey).toProvider(new SupplierProvider<>(supplierKey));
-  }
-
-  @SuppressWarnings("unchecked")
-  public static <T> void bindInstance(
-      Binder binder,
-      Key<T> bindKey,
-      T instance
-  )
-  {
-    binder.bind(bindKey).toInstance(instance);
-
-    final ParameterizedType supType = Types.newParameterizedType(Supplier.class, bindKey.getTypeLiteral().getType());
-    final Key supplierKey;
-
-    if (bindKey.getAnnotationType() != null) {
-      supplierKey = Key.get(supType, bindKey.getAnnotationType());
-    } else if (bindKey.getAnnotation() != null) {
-      supplierKey = Key.get(supType, bindKey.getAnnotation());
-    } else {
-      supplierKey = Key.get(supType);
-    }
-
-    binder.bind(supplierKey).toInstance(Suppliers.ofInstance(instance));
-  }
-
-  public static <T> JsonConfigProvider<T> of(String propertyBase, Class<T> classToProvide)
-  {
-    return new JsonConfigProvider<T>(propertyBase, classToProvide);
-  }
-
-  private final String propertyBase;
-  private final Class<T> classToProvide;
-
-  private Properties props;
-  private JsonConfigurator configurator;
-
-  private Supplier<T> retVal = null;
-
-  public JsonConfigProvider(
-      String propertyBase,
-      Class<T> classToProvide
-  )
-  {
-    this.propertyBase = propertyBase;
-    this.classToProvide = classToProvide;
-  }
-
-  @Inject
-  public void inject(
-      Properties props,
-      JsonConfigurator configurator
-  )
-  {
-    this.props = props;
-    this.configurator = configurator;
-  }
-
-  @Override
-  public Supplier<T> get()
-  {
-    if (retVal != null) {
-      return retVal;
-    }
-
-    try {
-      final T config = configurator.configurate(props, propertyBase, classToProvide);
-      retVal = Suppliers.ofInstance(config);
-    }
-    catch (RuntimeException e) {
-      // When a runtime exception is thrown, this provider will be called again if the object is requested again.
-      // That call will fail in the same way, because no parameters will have actually changed.
-      // Guice would then report the same error multiple times, which is noisy. Cache a null supplier and
-      // return that instead.  This technically enforces a singleton, but such is life.
-      retVal = Suppliers.ofInstance(null);
-      throw e;
-    }
-    return retVal;
-  }
-}
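
For context on how this class is used (the usage is unchanged by this PR apart from the package rename): configuration consumers call the static bind() helpers from a Guice module. The following is a minimal sketch, assuming a hypothetical Jackson-annotated ExampleConfig class; it is illustrative only and not part of the diff.

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.google.inject.Binder;
    import com.google.inject.Module;
    import org.apache.druid.guice.JsonConfigProvider;

    public class ExampleModule implements Module
    {
      @Override
      public void configure(Binder binder)
      {
        // Binds both ExampleConfig and Supplier<ExampleConfig>. Values are read from
        // runtime properties under the "druid.example" prefix and parsed lazily.
        JsonConfigProvider.bind(binder, "druid.example", ExampleConfig.class);
      }

      public static class ExampleConfig
      {
        @JsonProperty
        private String host = "localhost";

        public String getHost()
        {
          return host;
        }
      }
    }

Both ExampleConfig and Supplier&lt;ExampleConfig&gt; become injectable; the supplier is resolved once, in LazySingleton scope.
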
diff --git a/api/src/main/java/io/druid/guice/JsonConfigurator.java b/api/src/main/java/io/druid/guice/JsonConfigurator.java
deleted file mode 100644
index 845f274eaa6..00000000000
--- a/api/src/main/java/io/druid/guice/JsonConfigurator.java
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.fasterxml.jackson.annotation.JacksonInject;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.introspect.AnnotatedField;
-import com.fasterxml.jackson.databind.introspect.BeanPropertyDefinition;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Function;
-import com.google.common.base.Strings;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.inject.Inject;
-import com.google.inject.ProvisionException;
-import com.google.inject.spi.Message;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.logger.Logger;
-
-import javax.validation.ConstraintViolation;
-import javax.validation.ElementKind;
-import javax.validation.Path;
-import javax.validation.Validator;
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-/**
- */
-public class JsonConfigurator
-{
-  private static final Logger log = new Logger(JsonConfigurator.class);
-
-  private final ObjectMapper jsonMapper;
-  private final Validator validator;
-
-  @Inject
-  public JsonConfigurator(
-      ObjectMapper jsonMapper,
-      Validator validator
-  )
-  {
-    this.jsonMapper = jsonMapper;
-    this.validator = validator;
-  }
-
-  public <T> T configurate(Properties props, String propertyPrefix, Class<T> clazz) throws ProvisionException
-  {
-    verifyClazzIsConfigurable(jsonMapper, clazz);
-
-    // Make the prefix end with a period so that only properties for nested sub-objects are included.
-    final String propertyBase = propertyPrefix.endsWith(".") ? propertyPrefix : propertyPrefix + ".";
-
-    Map<String, Object> jsonMap = Maps.newHashMap();
-    for (String prop : props.stringPropertyNames()) {
-      if (prop.startsWith(propertyBase)) {
-        final String propValue = props.getProperty(prop);
-        Object value;
-        try {
-          // If the value is a plain String, Jackson expects it to be quoted, so quote it unless it looks like a JSON object or array.
-          String modifiedPropValue = propValue;
-          if (!(modifiedPropValue.startsWith("[") || modifiedPropValue.startsWith("{"))) {
-            modifiedPropValue = jsonMapper.writeValueAsString(propValue);
-          }
-          value = jsonMapper.readValue(modifiedPropValue, Object.class);
-        }
-        catch (IOException e) {
-          log.info(e, "Unable to parse [%s]=[%s] as a json object, using as is.", prop, propValue);
-          value = propValue;
-        }
-        hierarchicalPutValue(propertyPrefix, prop, prop.substring(propertyBase.length()), value, jsonMap);
-      }
-    }
-
-    final T config;
-    try {
-      config = jsonMapper.convertValue(jsonMap, clazz);
-    }
-    catch (IllegalArgumentException e) {
-      throw new ProvisionException(
-          StringUtils.format("Problem parsing object at prefix[%s]: %s.", propertyPrefix, e.getMessage()), e
-      );
-    }
-
-    final Set<ConstraintViolation<T>> violations = validator.validate(config);
-    if (!violations.isEmpty()) {
-      List<String> messages = Lists.newArrayList();
-
-      for (ConstraintViolation<T> violation : violations) {
-        StringBuilder path = new StringBuilder();
-        try {
-          Class<?> beanClazz = violation.getRootBeanClass();
-          final Iterator<Path.Node> iter = violation.getPropertyPath().iterator();
-          while (iter.hasNext()) {
-            Path.Node next = iter.next();
-            if (next.getKind() == ElementKind.PROPERTY) {
-              final String fieldName = next.getName();
-              final Field theField = beanClazz.getDeclaredField(fieldName);
-
-              if (theField.getAnnotation(JacksonInject.class) != null) {
-                path = new StringBuilder(StringUtils.format(" -- Injected field[%s] not bound!?", fieldName));
-                break;
-              }
-
-              JsonProperty annotation = theField.getAnnotation(JsonProperty.class);
-              final boolean noAnnotationValue = annotation == null || Strings.isNullOrEmpty(annotation.value());
-              final String pathPart = noAnnotationValue ? fieldName : annotation.value();
-              if (path.length() == 0) {
-                path.append(pathPart);
-              } else {
-                path.append(".").append(pathPart);
-              }
-            }
-          }
-        }
-        catch (NoSuchFieldException e) {
-          throw Throwables.propagate(e);
-        }
-
-        messages.add(StringUtils.format("%s - %s", path.toString(), violation.getMessage()));
-      }
-
-      throw new ProvisionException(
-          Iterables.transform(
-              messages,
-              new Function<String, Message>()
-              {
-                @Override
-                public Message apply(String input)
-                {
-                  return new Message(StringUtils.format("%s%s", propertyBase, input));
-                }
-              }
-          )
-      );
-    }
-
-    log.info("Loaded class[%s] from props[%s] as [%s]", clazz, propertyBase, config);
-
-    return config;
-  }
-
-  private static void hierarchicalPutValue(
-      String propertyPrefix,
-      String originalProperty,
-      String property,
-      Object value,
-      Map<String, Object> targetMap
-  )
-  {
-    int dotIndex = property.indexOf('.');
-    // Always put the property under its full name, even if it is of the form a.b. This makes sure the property is
-    // available for classes whose JsonProperty names are themselves of the form a.b.
-    // Note: this causes more properties than strictly required to be present in the jsonMap.
-    targetMap.put(property, value);
-    if (dotIndex < 0) {
-      return;
-    }
-    if (dotIndex == 0) {
-      throw new ProvisionException(StringUtils.format("Double dot in property: %s", originalProperty));
-    }
-    if (dotIndex == property.length() - 1) {
-      throw new ProvisionException(StringUtils.format("Dot at the end of property: %s", originalProperty));
-    }
-    String nestedKey = property.substring(0, dotIndex);
-    Object nested = targetMap.computeIfAbsent(nestedKey, k -> new HashMap<String, Object>());
-    if (!(nested instanceof Map)) {
-      // A clash is possible between properties used to configure different objects: e.g. druid.emitter=parametrized
-      // configures the Emitter class, while druid.emitter.parametrized.xxx=yyy configures the
-      // ParametrizedUriEmitterConfig object. Skipping the xxx=yyy key-value pair when configuring the Emitter makes
-      // no difference, so this situation is merely logged instead of throwing an exception.
-      log.info(
-          "Skipping %s property: one of its prefixes is also used as a property key. Prefix: %s",
-          originalProperty,
-          propertyPrefix
-      );
-      return;
-    }
-    Map<String, Object> nestedMap = (Map<String, Object>) nested;
-    hierarchicalPutValue(propertyPrefix, originalProperty, property.substring(dotIndex + 1), value, nestedMap);
-  }
-
-  @VisibleForTesting
-  public static <T> void verifyClazzIsConfigurable(ObjectMapper mapper, Class<T> clazz)
-  {
-    final List<BeanPropertyDefinition> beanDefs = mapper.getSerializationConfig()
-                                                        .introspect(mapper.constructType(clazz))
-                                                        .findProperties();
-    for (BeanPropertyDefinition beanDef : beanDefs) {
-      final AnnotatedField field = beanDef.getField();
-      if (field == null || !field.hasAnnotation(JsonProperty.class)) {
-        throw new ProvisionException(
-            StringUtils.format(
-                "JsonConfigurator requires Jackson-annotated Config objects to have field annotations. %s doesn't",
-                clazz
-            )
-        );
-      }
-    }
-  }
-}
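
To make the configurate() flow above concrete, here is a hedged sketch of how flat properties become a bound config object. It reuses the hypothetical ExampleConfig from the previous sketch and assumes a JsonConfigurator instance named configurator obtained from the injector.

    Properties props = new Properties();
    props.setProperty("druid.example.host", "data01.internal");

    // Everything under "druid.example." is gathered into a Map ({host=data01.internal}),
    // converted to ExampleConfig via Jackson, and validated with javax.validation; any
    // constraint violation surfaces as a Guice ProvisionException.
    ExampleConfig config = configurator.configurate(props, "druid.example", ExampleConfig.class);
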
diff --git a/api/src/main/java/io/druid/guice/KeyHolder.java b/api/src/main/java/io/druid/guice/KeyHolder.java
deleted file mode 100644
index 5f9a328bef1..00000000000
--- a/api/src/main/java/io/druid/guice/KeyHolder.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Key;
-
-/**
- */
-public class KeyHolder<T>
-{
-  private final Key<? extends T> key;
-
-  public KeyHolder(
-      Key<? extends T> key
-  )
-  {
-    this.key = key;
-  }
-
-  public Key<? extends T> getKey()
-  {
-    return key;
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/LazySingleton.java b/api/src/main/java/io/druid/guice/LazySingleton.java
deleted file mode 100644
index 11a86a12847..00000000000
--- a/api/src/main/java/io/druid/guice/LazySingleton.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.ScopeAnnotation;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@Target({ElementType.TYPE, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@ScopeAnnotation
-@PublicApi
-public @interface LazySingleton
-{
-}
diff --git a/api/src/main/java/io/druid/guice/LifecycleModule.java b/api/src/main/java/io/druid/guice/LifecycleModule.java
deleted file mode 100644
index a16f9e0eab8..00000000000
--- a/api/src/main/java/io/druid/guice/LifecycleModule.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Binder;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.Module;
-import com.google.inject.Provides;
-import com.google.inject.TypeLiteral;
-import com.google.inject.multibindings.Multibinder;
-import com.google.inject.name.Names;
-import io.druid.java.util.common.lifecycle.Lifecycle;
-
-import java.lang.annotation.Annotation;
-import java.util.Set;
-
-/**
- * A Module to add lifecycle management to the injector.  {@link DruidGuiceExtensions} must also be included.
- */
-public class LifecycleModule implements Module
-{
-  private final LifecycleScope scope = new LifecycleScope(Lifecycle.Stage.NORMAL);
-  private final LifecycleScope lastScope = new LifecycleScope(Lifecycle.Stage.LAST);
-
-  /**
-   * Registers a class to instantiate eagerly.  Classes mentioned here will be pulled out of
-   * the injector with an injector.getInstance() call when the lifecycle is created.
-   *
-   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
-   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
-   * side-effect even if nothing actually directly depends on the instance.
-   *
-   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point from the injector, not to
-   * auto-register things with the {@link Lifecycle}.  It is also possible to just bind things eagerly with Guice;
-   * it is not clear which approach is actually best.  This way is more explicit, but eager bindings inside of
-   * modules are less error-prone.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param clazz the class to instantiate
-   */
-  public static void register(Binder binder, Class<?> clazz)
-  {
-    registerKey(binder, Key.get(clazz));
-  }
-
-  /**
-   * Registers a class/annotation combination to instantiate eagerly.  Classes mentioned here will be pulled out of
-   * the injector with an injector.getInstance() call when the lifecycle is created.
-   *
-   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
-   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
-   * side-effect even if nothing actually directly depends on the instance.
-   *
-   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point from the injector, not to
-   * auto-register things with the {@link Lifecycle}.  It is also possible to just bind things eagerly with Guice;
-   * it is not clear which approach is actually best.  This way is more explicit, but eager bindings inside of
-   * modules are less error-prone.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param clazz the class to instantiate
-   * @param annotation the annotation class to register with Guice
-   */
-  public static void register(Binder binder, Class<?> clazz, Class<? extends Annotation> annotation)
-  {
-    registerKey(binder, Key.get(clazz, annotation));
-  }
-
-  /**
-   * Registers a key to instantiate eagerly.  {@link Key}s mentioned here will be pulled out of
-   * the injector with an injector.getInstance() call when the lifecycle is created.
-   *
-   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
-   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
-   * side-effect even if nothing actually directly depends on the instance.
-   *
-   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point
-   * from the injector, not to auto-register things with the {@link Lifecycle}.  It is
-   * also possible to just bind things eagerly with Guice; it is not clear which approach is actually best.
-   * This way is more explicit, but eager bindings inside of modules are less error-prone.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param key the key to instantiate eagerly
-   */
-  public static void registerKey(Binder binder, Key<?> key)
-  {
-    getEagerBinder(binder).addBinding().toInstance(new KeyHolder<Object>(key));
-  }
-
-  private static Multibinder<KeyHolder> getEagerBinder(Binder binder)
-  {
-    return Multibinder.newSetBinder(binder, KeyHolder.class, Names.named("lifecycle"));
-  }
-
-  @Override
-  public void configure(Binder binder)
-  {
-    getEagerBinder(binder); // Load up the eager binder so that it will inject the empty set at a minimum.
-
-    binder.bindScope(ManageLifecycle.class, scope);
-    binder.bindScope(ManageLifecycleLast.class, lastScope);
-  }
-
-  @Provides @LazySingleton
-  public Lifecycle getLifecycle(final Injector injector)
-  {
-    final Key<Set<KeyHolder>> keyHolderKey = Key.get(new TypeLiteral<Set<KeyHolder>>(){}, Names.named("lifecycle"));
-    final Set<KeyHolder> eagerClasses = injector.getInstance(keyHolderKey);
-
-    Lifecycle lifecycle = new Lifecycle()
-    {
-      @Override
-      public void start() throws Exception
-      {
-        for (KeyHolder<?> holder : eagerClasses) {
-          injector.getInstance(holder.getKey()); // Pull the key so as to "eagerly" load up the class.
-        }
-        super.start();
-      }
-    };
-    scope.setLifecycle(lifecycle);
-    lastScope.setLifecycle(lifecycle);
-
-    return lifecycle;
-  }
-}
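
A minimal sketch of the eager-registration pattern the javadocs above describe, assuming a hypothetical MetricsMonitor class and that LifecycleModule itself is installed in the injector:

    public class MonitoringModule implements Module
    {
      @Override
      public void configure(Binder binder)
      {
        // The binding determines the scope; register() only forces eager instantiation
        // when the Lifecycle is created, it does not itself add the instance to the
        // Lifecycle. Binding in ManageLifecycle scope is what does that.
        binder.bind(MetricsMonitor.class).in(ManageLifecycle.class);
        LifecycleModule.register(binder, MetricsMonitor.class);
      }
    }
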
diff --git a/api/src/main/java/io/druid/guice/LifecycleScope.java b/api/src/main/java/io/druid/guice/LifecycleScope.java
deleted file mode 100644
index f4d6d0140d0..00000000000
--- a/api/src/main/java/io/druid/guice/LifecycleScope.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.collect.Lists;
-import com.google.inject.Key;
-import com.google.inject.Provider;
-import com.google.inject.Scope;
-import io.druid.java.util.common.lifecycle.Lifecycle;
-import io.druid.java.util.common.logger.Logger;
-
-import java.util.List;
-
-/**
- * A scope that adds objects to the Lifecycle.  This is by definition also a lazy singleton scope.
- */
-public class LifecycleScope implements Scope
-{
-  private static final Logger log = new Logger(LifecycleScope.class);
-  private final Lifecycle.Stage stage;
-
-  private Lifecycle lifecycle;
-  private final List<Object> instances = Lists.newLinkedList();
-
-  public LifecycleScope(Lifecycle.Stage stage)
-  {
-    this.stage = stage;
-  }
-
-  public void setLifecycle(Lifecycle lifecycle)
-  {
-    synchronized (instances) {
-      this.lifecycle = lifecycle;
-      for (Object instance : instances) {
-        lifecycle.addManagedInstance(instance, stage);
-      }
-    }
-  }
-
-  @Override
-  public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped)
-  {
-    return new Provider<T>()
-    {
-      private volatile T value = null;
-
-      @Override
-      public synchronized T get()
-      {
-        if (value == null) {
-          final T retVal = unscoped.get();
-
-          synchronized (instances) {
-            if (lifecycle == null) {
-              instances.add(retVal);
-            } else {
-              try {
-                lifecycle.addMaybeStartManagedInstance(retVal, stage);
-              }
-              catch (Exception e) {
-                log.warn(e, "Caught exception when trying to create a[%s]", key);
-                return null;
-              }
-            }
-          }
-
-          value = retVal;
-        }
-
-        return value;
-      }
-    };
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/ManageLifecycle.java b/api/src/main/java/io/druid/guice/ManageLifecycle.java
deleted file mode 100644
index 67536e16f9a..00000000000
--- a/api/src/main/java/io/druid/guice/ManageLifecycle.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.ScopeAnnotation;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * Marks the object to be managed by {@link io.druid.java.util.common.lifecycle.Lifecycle}
- *
- * This Scope gets defined by {@link LifecycleModule}
- */
-@Target({ ElementType.TYPE, ElementType.METHOD })
-@Retention(RetentionPolicy.RUNTIME)
-@ScopeAnnotation
-@PublicApi
-public @interface ManageLifecycle
-{
-}
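
For illustration, a class opting into this scope typically pairs the annotation with the lifecycle method annotations from the same package as Lifecycle; ExampleService below is hypothetical.

    @ManageLifecycle
    public class ExampleService
    {
      @LifecycleStart
      public void start()
      {
        // invoked when Lifecycle.start() reaches Stage.NORMAL
      }

      @LifecycleStop
      public void stop()
      {
        // invoked in reverse start order when the Lifecycle stops
      }
    }
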
diff --git a/api/src/main/java/io/druid/guice/ManageLifecycleLast.java b/api/src/main/java/io/druid/guice/ManageLifecycleLast.java
deleted file mode 100644
index 2857f439173..00000000000
--- a/api/src/main/java/io/druid/guice/ManageLifecycleLast.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.ScopeAnnotation;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * Marks the object to be managed by {@link io.druid.java.util.common.lifecycle.Lifecycle} and set to be on Stage.LAST
- *
- * This Scope gets defined by {@link LifecycleModule}
- */
-@Target({ ElementType.TYPE, ElementType.METHOD })
-@Retention(RetentionPolicy.RUNTIME)
-@ScopeAnnotation
-@PublicApi
-public @interface ManageLifecycleLast
-{
-}
diff --git a/api/src/main/java/io/druid/guice/PolyBind.java b/api/src/main/java/io/druid/guice/PolyBind.java
deleted file mode 100644
index 41c265a66bf..00000000000
--- a/api/src/main/java/io/druid/guice/PolyBind.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Preconditions;
-import com.google.inject.Binder;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.Provider;
-import com.google.inject.ProvisionException;
-import com.google.inject.TypeLiteral;
-import com.google.inject.binder.ScopedBindingBuilder;
-import com.google.inject.multibindings.MapBinder;
-import com.google.inject.util.Types;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.StringUtils;
-
-import javax.annotation.Nullable;
-import java.lang.reflect.ParameterizedType;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * Provides the ability to create "polymorphic" bindings, where the polymorphism is actually just a decision made
- * based on a value in a Properties object.
- *
- * The workflow is that you first create a choice by calling createChoice().  Then you create options using the binder
- * returned by the optionBinder() method.  Multiple different modules can call optionBinder and all options will be
- * reflected at injection time as long as equivalent interface Key objects are passed into the various methods.
- */
-@PublicApi
-public class PolyBind
-{
-  /**
-   * Sets up a "choice" for the injector to resolve at injection time.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param property the property that will be checked to determine the implementation choice
-   * @param interfaceKey the interface that will be injected using this choice
-   * @param defaultKey the default instance to be injected if the property doesn't match a choice.  Can be null
-   * @param <T> interface type
-   * @return A ScopedBindingBuilder so that scopes can be added to the binding, if required.
-   */
-  public static <T> ScopedBindingBuilder createChoice(
-      Binder binder,
-      String property,
-      Key<T> interfaceKey,
-      @Nullable Key<? extends T> defaultKey
-  )
-  {
-    ConfiggedProvider<T> provider = new ConfiggedProvider<>(interfaceKey, property, defaultKey, null);
-    return binder.bind(interfaceKey).toProvider(provider);
-  }
-
-  /**
-   * @deprecated use {@link #createChoiceWithDefault(Binder, String, Key, String)}
-   * instead. {@code defaultKey} argument is ignored.
-   */
-  @Deprecated
-  public static <T> ScopedBindingBuilder createChoiceWithDefault(
-      Binder binder,
-      String property,
-      Key<T> interfaceKey,
-      Key<? extends T> defaultKey,
-      String defaultPropertyValue
-  )
-  {
-    return createChoiceWithDefault(binder, property, interfaceKey, defaultPropertyValue);
-  }
-
-  /**
-   * Sets up a "choice" for the injector to resolve at injection time.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param property the property that will be checked to determine the implementation choice
-   * @param interfaceKey the interface that will be injected using this choice
-   * @param defaultPropertyValue the default property value to use if the property is not set.
-   * @param <T> interface type
-   * @return A ScopedBindingBuilder so that scopes can be added to the binding, if required.
-   */
-  public static <T> ScopedBindingBuilder createChoiceWithDefault(
-      Binder binder,
-      String property,
-      Key<T> interfaceKey,
-      String defaultPropertyValue
-  )
-  {
-    Preconditions.checkNotNull(defaultPropertyValue);
-    ConfiggedProvider<T> provider = new ConfiggedProvider<>(interfaceKey, property, null, defaultPropertyValue);
-    return binder.bind(interfaceKey).toProvider(provider);
-  }
-
-  /**
-   * Binds an option for a specific choice.  The choice must already be registered on the injector for this to work.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param interfaceKey the interface that will have an option added to it.  This must equal the
-   *                     Key provided to createChoice
-   * @param <T> interface type
-   * @return A MapBinder that can be used to create the actual option bindings.
-   */
-  public static <T> MapBinder<String, T> optionBinder(Binder binder, Key<T> interfaceKey)
-  {
-    final TypeLiteral<T> interfaceType = interfaceKey.getTypeLiteral();
-
-    if (interfaceKey.getAnnotation() != null) {
-      return MapBinder.newMapBinder(
-          binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotation()
-      );
-    } else if (interfaceKey.getAnnotationType() != null) {
-      return MapBinder.newMapBinder(
-          binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotationType()
-      );
-    } else {
-      return MapBinder.newMapBinder(binder, TypeLiteral.get(String.class), interfaceType);
-    }
-  }
-
-  static class ConfiggedProvider<T> implements Provider<T>
-  {
-    private final Key<T> key;
-    private final String property;
-    @Nullable
-    private final Key<? extends T> defaultKey;
-    @Nullable
-    private final String defaultPropertyValue;
-
-    private Injector injector;
-    private Properties props;
-
-    ConfiggedProvider(
-        Key<T> key,
-        String property,
-        @Nullable Key<? extends T> defaultKey,
-        @Nullable String defaultPropertyValue
-    )
-    {
-      this.key = key;
-      this.property = property;
-      this.defaultKey = defaultKey;
-      this.defaultPropertyValue = defaultPropertyValue;
-    }
-
-    @Inject
-    void configure(Injector injector, Properties props)
-    {
-      this.injector = injector;
-      this.props = props;
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public T get()
-    {
-      final ParameterizedType mapType = Types.mapOf(
-          String.class, Types.newParameterizedType(Provider.class, key.getTypeLiteral().getType())
-      );
-
-      final Map<String, Provider<T>> implsMap;
-      if (key.getAnnotation() != null) {
-        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType, key.getAnnotation()));
-      } else if (key.getAnnotationType() != null) {
-        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType, key.getAnnotationType()));
-      } else {
-        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType));
-      }
-
-      String implName = props.getProperty(property);
-      if (implName == null) {
-        if (defaultPropertyValue == null) {
-          if (defaultKey == null) {
-            throw new ProvisionException(StringUtils.format("Some value must be configured for [%s]", key));
-          }
-          return injector.getInstance(defaultKey);
-        }
-        implName = defaultPropertyValue;
-      }
-      final Provider<T> provider = implsMap.get(implName);
-
-      if (provider == null) {
-        throw new ProvisionException(
-            StringUtils.format("Unknown provider[%s] of %s, known options[%s]", implName, key, implsMap.keySet())
-        );
-      }
-
-      return provider.get();
-    }
-  }
-}
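
A hedged sketch of the createChoice/optionBinder workflow described in the class javadoc, to be read as two fragments inside configure(Binder) methods; ExampleEmitter, NoopExampleEmitter, and the property name are hypothetical.

    // In the module that owns the extension point:
    PolyBind.createChoiceWithDefault(
        binder,
        "druid.example.emitter.type",   // property consulted at injection time
        Key.get(ExampleEmitter.class),
        "noop"                          // option used when the property is unset
    );

    // In any module (possibly an extension) contributing an implementation:
    PolyBind.optionBinder(binder, Key.get(ExampleEmitter.class))
            .addBinding("noop")
            .to(NoopExampleEmitter.class)
            .in(LazySingleton.class);

As the javadoc notes, options contributed from different modules are all visible at injection time as long as equivalent Key objects are used.
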
diff --git a/api/src/main/java/io/druid/guice/SupplierProvider.java b/api/src/main/java/io/druid/guice/SupplierProvider.java
deleted file mode 100644
index 205d1b1516d..00000000000
--- a/api/src/main/java/io/druid/guice/SupplierProvider.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Supplier;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.Provider;
-
-/**
- */
-public class SupplierProvider<T> implements Provider<T>
-{
-  private final Key<Supplier<T>> supplierKey;
-
-  private Provider<Supplier<T>> supplierProvider;
-
-  public SupplierProvider(
-      Key<Supplier<T>> supplierKey
-  )
-  {
-    this.supplierKey = supplierKey;
-  }
-
-  @Inject
-  public void configure(Injector injector)
-  {
-    this.supplierProvider = injector.getProvider(supplierKey);
-  }
-
-  @Override
-  public T get()
-  {
-    return supplierProvider.get().get();
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java b/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java
deleted file mode 100644
index ffca49b35f4..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@BindingAnnotation
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@PublicApi
-public @interface EscalatedGlobal
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/Global.java b/api/src/main/java/io/druid/guice/annotations/Global.java
deleted file mode 100644
index b81e737a01e..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/Global.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@BindingAnnotation
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@PublicApi
-public @interface Global
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java b/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java
deleted file mode 100644
index d7631816f41..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@BindingAnnotation
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@PublicApi
-public @interface JSR311Resource
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/Json.java b/api/src/main/java/io/druid/guice/annotations/Json.java
deleted file mode 100644
index 378819ff4e1..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/Json.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@BindingAnnotation
-@PublicApi
-public @interface Json
-{
-}
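
Binding annotations like this one distinguish otherwise identical types at injection points. A minimal sketch with a hypothetical ExampleResource, relying on Druid binding a JSON-configured ObjectMapper under @Json elsewhere:

    public class ExampleResource
    {
      private final ObjectMapper jsonMapper;

      @Inject
      public ExampleResource(@Json ObjectMapper jsonMapper)
      {
        // The @Json qualifier selects the JSON-configured ObjectMapper binding,
        // as opposed to, e.g., the Smile-configured one.
        this.jsonMapper = jsonMapper;
      }
    }
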
diff --git a/api/src/main/java/io/druid/guice/annotations/Self.java b/api/src/main/java/io/druid/guice/annotations/Self.java
deleted file mode 100644
index f26b2580951..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/Self.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@BindingAnnotation
-@PublicApi
-public @interface Self
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/Smile.java b/api/src/main/java/io/druid/guice/annotations/Smile.java
deleted file mode 100644
index 4dc02b1f838..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/Smile.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@BindingAnnotation
-@PublicApi
-public @interface Smile
-{
-}
diff --git a/api/src/main/java/io/druid/indexer/IngestionState.java b/api/src/main/java/io/druid/indexer/IngestionState.java
deleted file mode 100644
index 1c78c4e5afe..00000000000
--- a/api/src/main/java/io/druid/indexer/IngestionState.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-public enum IngestionState
-{
-  NOT_STARTED,
-  DETERMINE_PARTITIONS,
-  BUILD_SEGMENTS,
-  COMPLETED
-}
diff --git a/api/src/main/java/io/druid/indexer/RunnerTaskState.java b/api/src/main/java/io/druid/indexer/RunnerTaskState.java
deleted file mode 100644
index 11efe7daf38..00000000000
--- a/api/src/main/java/io/druid/indexer/RunnerTaskState.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package io.druid.indexer;
-/**
- * Represents the state of a task in the task runner that is not covered by {@link TaskState}. This state is not stored in the database.
- */
-public enum RunnerTaskState
-{
-  WAITING,
-  PENDING,
-  RUNNING,
-  NONE // is used for a completed task
-}
diff --git a/api/src/main/java/io/druid/indexer/TaskInfo.java b/api/src/main/java/io/druid/indexer/TaskInfo.java
deleted file mode 100644
index 32b27a245b2..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskInfo.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package io.druid.indexer;
-
-import com.google.common.base.Preconditions;
-import org.joda.time.DateTime;
-
-import javax.annotation.Nullable;
-
-/**
- * This class is used to store task info from a runner query and cache it in the OverlordResource.
- */
-public class TaskInfo<EntryType, StatusType>
-{
-  private final String id;
-  private final DateTime createdTime;
-  private final StatusType status;
-  private final String dataSource;
-  @Nullable
-  private final EntryType task;
-
-  public TaskInfo(
-      String id,
-      DateTime createdTime,
-      StatusType status,
-      String dataSource,
-      @Nullable EntryType task
-  )
-  {
-    this.id = Preconditions.checkNotNull(id, "id");
-    this.createdTime = Preconditions.checkNotNull(createdTime, "createdTime");
-    this.status = Preconditions.checkNotNull(status, "status");
-    this.dataSource = Preconditions.checkNotNull(dataSource, "dataSource");
-    this.task = task;
-  }
-
-  public String getId()
-  {
-    return id;
-  }
-
-  public DateTime getCreatedTime()
-  {
-    return createdTime;
-  }
-
-  public StatusType getStatus()
-  {
-    return status;
-  }
-
-  public String getDataSource()
-  {
-    return dataSource;
-  }
-
-  @Nullable
-  public EntryType getTask()
-  {
-    return task;
-  }
-}
-
diff --git a/api/src/main/java/io/druid/indexer/TaskLocation.java b/api/src/main/java/io/druid/indexer/TaskLocation.java
deleted file mode 100644
index 7cbb02b391b..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskLocation.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.Objects;
-
-public class TaskLocation
-{
-  private static final TaskLocation UNKNOWN = new TaskLocation(null, -1, -1);
-
-  private final String host;
-  private final int port;
-  private final int tlsPort;
-
-  public static TaskLocation create(String host, int port, int tlsPort)
-  {
-    return new TaskLocation(host, port, tlsPort);
-  }
-
-  public static TaskLocation unknown()
-  {
-    return TaskLocation.UNKNOWN;
-  }
-
-  @JsonCreator
-  public TaskLocation(
-      @JsonProperty("host") String host,
-      @JsonProperty("port") int port,
-      @JsonProperty("tlsPort") int tlsPort
-  )
-  {
-    this.host = host;
-    this.port = port;
-    this.tlsPort = tlsPort;
-  }
-
-  @JsonProperty
-  public String getHost()
-  {
-    return host;
-  }
-
-  @JsonProperty
-  public int getPort()
-  {
-    return port;
-  }
-
-  @JsonProperty
-  public int getTlsPort()
-  {
-    return tlsPort;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    TaskLocation that = (TaskLocation) o;
-
-    return port == that.port && tlsPort == that.tlsPort &&
-           Objects.equals(host, that.host);
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = Objects.hashCode(host); // host may be null, e.g. for the UNKNOWN location
-    result = 31 * result + port;
-    result = 31 * result + tlsPort;
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "TaskLocation{" +
-           "host='" + host + '\'' +
-           ", port=" + port +
-           ", tlsPort=" + tlsPort +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/indexer/TaskMetricsGetter.java b/api/src/main/java/io/druid/indexer/TaskMetricsGetter.java
deleted file mode 100644
index ba1469a1494..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskMetricsGetter.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-import java.util.List;
-import java.util.Map;
-
-public interface TaskMetricsGetter
-{
-  List<String> getKeys();
-  Map<String, Number> getTotalMetrics();
-}
diff --git a/api/src/main/java/io/druid/indexer/TaskMetricsUtils.java b/api/src/main/java/io/druid/indexer/TaskMetricsUtils.java
deleted file mode 100644
index a4203404fe4..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskMetricsUtils.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-import com.google.common.collect.Maps;
-
-import java.util.Map;
-
-public class TaskMetricsUtils
-{
-  public static final String ROWS_PROCESSED = "rowsProcessed";
-  public static final String ROWS_PROCESSED_WITH_ERRORS = "rowsProcessedWithErrors";
-  public static final String ROWS_UNPARSEABLE = "rowsUnparseable";
-  public static final String ROWS_THROWN_AWAY = "rowsThrownAway";
-
-  public static Map<String, Object> makeIngestionRowMetrics(
-      long rowsProcessed,
-      long rowsProcessedWithErrors,
-      long rowsUnparseable,
-      long rowsThrownAway
-  )
-  {
-    Map<String, Object> metricsMap = Maps.newHashMap();
-    metricsMap.put(ROWS_PROCESSED, rowsProcessed);
-    metricsMap.put(ROWS_PROCESSED_WITH_ERRORS, rowsProcessedWithErrors);
-    metricsMap.put(ROWS_UNPARSEABLE, rowsUnparseable);
-    metricsMap.put(ROWS_THROWN_AWAY, rowsThrownAway);
-    return metricsMap;
-  }
-}
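
For reference, a quick sketch of how these keys line up with makeIngestionRowMetrics (the counts are illustrative):

    Map<String, Object> metrics = TaskMetricsUtils.makeIngestionRowMetrics(1000L, 5L, 2L, 10L);
    // => {rowsProcessed=1000, rowsProcessedWithErrors=5, rowsUnparseable=2, rowsThrownAway=10}
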
diff --git a/api/src/main/java/io/druid/indexer/TaskState.java b/api/src/main/java/io/druid/indexer/TaskState.java
deleted file mode 100644
index b6c96389727..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskState.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-public enum TaskState
-{
-  RUNNING,
-  SUCCESS,
-  FAILED;
-
-  public boolean isRunnable()
-  {
-    return this == RUNNING;
-  }
-
-  public boolean isComplete()
-  {
-    return this != RUNNING;
-  }
-
-  public boolean isSuccess()
-  {
-    return this == SUCCESS;
-  }
-
-  public boolean isFailure()
-  {
-    return this == FAILED;
-  }
-}
diff --git a/api/src/main/java/io/druid/indexer/TaskStatus.java b/api/src/main/java/io/druid/indexer/TaskStatus.java
deleted file mode 100644
index e6573894398..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskStatus.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Objects;
-import com.google.common.base.Preconditions;
-
-/**
- * Represents the status of a task from the perspective of the coordinator. The task may be ongoing
- * ({@link #isComplete()} false) or it may be complete ({@link #isComplete()} true).
- *
- * TaskStatus objects are immutable.
- */
-public class TaskStatus
-{
-  public static final int MAX_ERROR_MSG_LENGTH = 100;
-
-  public static TaskStatus running(String taskId)
-  {
-    return new TaskStatus(taskId, TaskState.RUNNING, -1, null);
-  }
-
-  public static TaskStatus success(String taskId)
-  {
-    return new TaskStatus(taskId, TaskState.SUCCESS, -1, null);
-  }
-
-  public static TaskStatus success(String taskId, String errorMsg)
-  {
-    return new TaskStatus(taskId, TaskState.SUCCESS, -1, errorMsg);
-  }
-
-  public static TaskStatus failure(String taskId)
-  {
-    return new TaskStatus(taskId, TaskState.FAILED, -1, null);
-  }
-
-  public static TaskStatus failure(String taskId, String errorMsg)
-  {
-    return new TaskStatus(taskId, TaskState.FAILED, -1, errorMsg);
-  }
-
-  public static TaskStatus fromCode(String taskId, TaskState code)
-  {
-    return new TaskStatus(taskId, code, -1, null);
-  }
-
-  // The error message can be large, so truncate it to avoid storing large objects in zookeeper/metadata storage.
-  // The full error message will be available via a TaskReport.
-  private static String truncateErrorMsg(String errorMsg)
-  {
-    if (errorMsg != null && errorMsg.length() > MAX_ERROR_MSG_LENGTH) {
-      return errorMsg.substring(0, MAX_ERROR_MSG_LENGTH) + "...";
-    } else {
-      return errorMsg;
-    }
-  }
-
-  private final String id;
-  private final TaskState status;
-  private final long duration;
-  private final String errorMsg;
-
-  @JsonCreator
-  protected TaskStatus(
-      @JsonProperty("id") String id,
-      @JsonProperty("status") TaskState status,
-      @JsonProperty("duration") long duration,
-      @JsonProperty("errorMsg") String errorMsg
-  )
-  {
-    this.id = id;
-    this.status = status;
-    this.duration = duration;
-    this.errorMsg = truncateErrorMsg(errorMsg);
-
-    // Check class invariants.
-    Preconditions.checkNotNull(id, "id");
-    Preconditions.checkNotNull(status, "status");
-  }
-
-  @JsonProperty("id")
-  public String getId()
-  {
-    return id;
-  }
-
-  @JsonProperty("status")
-  public TaskState getStatusCode()
-  {
-    return status;
-  }
-
-  @JsonProperty("duration")
-  public long getDuration()
-  {
-    return duration;
-  }
-
-  @JsonProperty("errorMsg")
-  public String getErrorMsg()
-  {
-    return errorMsg;
-  }
-
-  /**
-   * Signals that a task is not yet complete, and is still runnable on a worker. Exactly one of isRunnable,
-   * isSuccess, or isFailure will be true at any one time.
-   *
-   * @return whether the task is runnable.
-   */
-  @JsonIgnore
-  public boolean isRunnable()
-  {
-    return status == TaskState.RUNNING;
-  }
-
-  /**
-   * Inverse of {@link #isRunnable}.
-   *
-   * @return whether the task is complete.
-   */
-  @JsonIgnore
-  public boolean isComplete()
-  {
-    return !isRunnable();
-  }
-
-  /**
-   * Signals that the task completed successfully. Exactly one of isRunnable, isSuccess, or
-   * isFailure will be true at any one time.
-   *
-   * @return whether the task succeeded.
-   */
-  @JsonIgnore
-  public boolean isSuccess()
-  {
-    return status == TaskState.SUCCESS;
-  }
-
-  /**
-   * Returned by tasks when they complete unsuccessfully. Exactly one of isRunnable, isSuccess, or
-   * isFailure will be true at any one time.
-   *
-   * @return whether the task failed
-   */
-  @JsonIgnore
-  public boolean isFailure()
-  {
-    return status == TaskState.FAILED;
-  }
-
-  public TaskStatus withDuration(long _duration)
-  {
-    return new TaskStatus(id, status, _duration, errorMsg);
-  }
-
-  @Override
-  public String toString()
-  {
-    return Objects.toStringHelper(this)
-                  .add("id", id)
-                  .add("status", status)
-                  .add("duration", duration)
-                  .add("errorMsg", errorMsg)
-                  .toString();
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-    TaskStatus that = (TaskStatus) o;
-    return getDuration() == that.getDuration() &&
-           java.util.Objects.equals(getId(), that.getId()) &&
-           status == that.status &&
-           java.util.Objects.equals(getErrorMsg(), that.getErrorMsg());
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return java.util.Objects.hash(getId(), status, getDuration(), getErrorMsg());
-  }
-}
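
The truncation behavior documented in truncateErrorMsg above can be seen in a short sketch; the task id and message here are made up. Only MAX_ERROR_MSG_LENGTH characters plus a trailing "..." survive, keeping the row stored in ZooKeeper/metadata small:

public class TaskStatusDemo
{
  public static void main(String[] args)
  {
    StringBuilder longMsg = new StringBuilder();
    for (int i = 0; i < 500; i++) {
      longMsg.append('x');
    }
    TaskStatus status = TaskStatus.failure("index_demo_task", longMsg.toString());
    // MAX_ERROR_MSG_LENGTH (100) characters are kept, plus "..." = 103.
    System.out.println(status.getErrorMsg().length());
    System.out.println(status.isFailure()); // true
  }
}
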
diff --git a/api/src/main/java/io/druid/indexer/TaskStatusPlus.java b/api/src/main/java/io/druid/indexer/TaskStatusPlus.java
deleted file mode 100644
index 2f762810411..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskStatusPlus.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import org.joda.time.DateTime;
-
-import javax.annotation.Nullable;
-import java.util.Objects;
-
-public class TaskStatusPlus
-{
-  private final String id;
-  private final String type;
-  private final DateTime createdTime;
-  private final DateTime queueInsertionTime;
-  private final TaskState state;
-  private final RunnerTaskState runnerTaskState;
-  private final Long duration;
-  private final TaskLocation location;
-  private final String dataSource;
-
-  @Nullable
-  private final String errorMsg;
-
-  @JsonCreator
-  public TaskStatusPlus(
-      @JsonProperty("id") String id,
-      @JsonProperty("type") @Nullable String type, // nullable for backward compatibility
-      @JsonProperty("createdTime") DateTime createdTime,
-      @JsonProperty("queueInsertionTime") DateTime queueInsertionTime,
-      @JsonProperty("statusCode") @Nullable TaskState state,
-      @JsonProperty("runnerStatusCode") @Nullable RunnerTaskState runnerTaskState,
-      @JsonProperty("duration") @Nullable Long duration,
-      @JsonProperty("location") TaskLocation location,
-      @JsonProperty("dataSource") @Nullable String dataSource, // nullable for backward compatibility
-      @JsonProperty("errorMsg") @Nullable String errorMsg
-  )
-  {
-    if (state != null && state.isComplete()) {
-      Preconditions.checkNotNull(duration, "duration");
-    }
-    this.id = Preconditions.checkNotNull(id, "id");
-    this.type = type;
-    this.createdTime = Preconditions.checkNotNull(createdTime, "createdTime");
-    this.queueInsertionTime = Preconditions.checkNotNull(queueInsertionTime, "queueInsertionTime");
-    this.state = state;
-    this.runnerTaskState = runnerTaskState;
-    this.duration = duration;
-    this.location = Preconditions.checkNotNull(location, "location");
-    this.dataSource = dataSource;
-    this.errorMsg = errorMsg;
-  }
-
-  @JsonProperty
-  public String getId()
-  {
-    return id;
-  }
-
-  @Nullable
-  @JsonProperty
-  public String getType()
-  {
-    return type;
-  }
-
-  @JsonProperty
-  public DateTime getCreatedTime()
-  {
-    return createdTime;
-  }
-
-  @JsonProperty
-  public DateTime getQueueInsertionTime()
-  {
-    return queueInsertionTime;
-  }
-
-  @Nullable
-  @JsonProperty("statusCode")
-  public TaskState getState()
-  {
-    return state;
-  }
-
-  @Nullable
-  @JsonProperty("runnerStatusCode")
-  public RunnerTaskState getRunnerTaskState()
-  {
-    return runnerTaskState;
-  }
-
-  @Nullable
-  @JsonProperty
-  public Long getDuration()
-  {
-    return duration;
-  }
-
-  @JsonProperty
-  public TaskLocation getLocation()
-  {
-    return location;
-  }
-
-  @JsonProperty
-  public String getDataSource()
-  {
-    return dataSource;
-  }
-
-  @Nullable
-  @JsonProperty("errorMsg")
-  public String getErrorMsg()
-  {
-    return errorMsg;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-    TaskStatusPlus that = (TaskStatusPlus) o;
-    return Objects.equals(getId(), that.getId()) &&
-           Objects.equals(getType(), that.getType()) &&
-           Objects.equals(getCreatedTime(), that.getCreatedTime()) &&
-           Objects.equals(getQueueInsertionTime(), that.getQueueInsertionTime()) &&
-           getState() == that.getState() &&
-           Objects.equals(getDuration(), that.getDuration()) &&
-           Objects.equals(getLocation(), that.getLocation()) &&
-           Objects.equals(getDataSource(), that.getDataSource()) &&
-           Objects.equals(getErrorMsg(), that.getErrorMsg());
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return Objects.hash(
-        getId(),
-        getType(),
-        getCreatedTime(),
-        getQueueInsertionTime(),
-        getState(),
-        getDuration(),
-        getLocation(),
-        getDataSource(),
-        getErrorMsg()
-    );
-  }
-
-  @Override
-  public String toString()
-  {
-    return "TaskStatusPlus{" +
-           "id='" + id + '\'' +
-           ", type='" + type + '\'' +
-           ", createdTime=" + createdTime +
-           ", queueInsertionTime=" + queueInsertionTime +
-           ", state=" + state +
-           ", duration=" + duration +
-           ", location=" + location +
-           ", dataSource='" + dataSource + '\'' +
-           ", errorMsg='" + errorMsg + '\'' +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/initialization/DruidModule.java b/api/src/main/java/io/druid/initialization/DruidModule.java
deleted file mode 100644
index f6f70703d39..00000000000
--- a/api/src/main/java/io/druid/initialization/DruidModule.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.initialization;
-
-import com.fasterxml.jackson.databind.Module;
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.util.List;
-
-/**
- */
-@ExtensionPoint
-public interface DruidModule extends com.google.inject.Module
-{
-  List<? extends Module> getJacksonModules();
-}
diff --git a/api/src/main/java/io/druid/jackson/CommaListJoinDeserializer.java b/api/src/main/java/io/druid/jackson/CommaListJoinDeserializer.java
deleted file mode 100644
index 5511fbf3b71..00000000000
--- a/api/src/main/java/io/druid/jackson/CommaListJoinDeserializer.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.jackson;
-
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.deser.std.StdScalarDeserializer;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- */
-public class CommaListJoinDeserializer extends StdScalarDeserializer<List<String>>
-{
-  protected CommaListJoinDeserializer()
-  {
-    super(List.class);
-  }
-
-  @Override
-  public List<String> deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
-      throws IOException, JsonProcessingException
-  {
-    return Arrays.asList(jsonParser.getText().split(","));
-  }
-}
diff --git a/api/src/main/java/io/druid/jackson/CommaListJoinSerializer.java b/api/src/main/java/io/druid/jackson/CommaListJoinSerializer.java
deleted file mode 100644
index 8dceb482e28..00000000000
--- a/api/src/main/java/io/druid/jackson/CommaListJoinSerializer.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.jackson;
-
-import com.fasterxml.jackson.core.JsonGenerationException;
-import com.fasterxml.jackson.core.JsonGenerator;
-import com.fasterxml.jackson.databind.SerializerProvider;
-import com.fasterxml.jackson.databind.ser.std.StdScalarSerializer;
-import com.google.common.base.Joiner;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- */
-public class CommaListJoinSerializer extends StdScalarSerializer<List<String>>
-{
-  private static final Joiner joiner = Joiner.on(",");
-
-  protected CommaListJoinSerializer()
-  {
-    super(List.class, true);
-  }
-
-  @Override
-  public void serialize(List<String> value, JsonGenerator jgen, SerializerProvider provider)
-      throws IOException, JsonGenerationException
-  {
-    jgen.writeString(joiner.join(value));
-  }
-}
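
Taken together, this serializer/deserializer pair round-trips a List<String> through a single comma-joined JSON string, which is how DataSegment keeps its dimension and metric lists compact. A minimal sketch, assuming a hypothetical Columns POJO (Jackson instantiates the protected handlers itself when they are referenced from annotations):

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import java.util.Arrays;
import java.util.List;

public class CommaListDemo
{
  public static class Columns
  {
    @JsonProperty
    @JsonSerialize(using = CommaListJoinSerializer.class)
    @JsonDeserialize(using = CommaListJoinDeserializer.class)
    public List<String> dimensions = Arrays.asList("host", "service");
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(new Columns());
    System.out.println(json); // {"dimensions":"host,service"}
    Columns roundTripped = mapper.readValue(json, Columns.class);
    System.out.println(roundTripped.dimensions); // [host, service]
  }
}
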
diff --git a/api/src/main/java/io/druid/js/JavaScriptConfig.java b/api/src/main/java/io/druid/js/JavaScriptConfig.java
deleted file mode 100644
index 35dc71ae4e1..00000000000
--- a/api/src/main/java/io/druid/js/JavaScriptConfig.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.js;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import io.druid.guice.annotations.PublicApi;
-
-/**
- * Should be used by extension filters, aggregators, etc. that use JavaScript, to determine whether
- * JavaScript is enabled or not.
- */
-@PublicApi
-public class JavaScriptConfig
-{
-  public static final int DEFAULT_OPTIMIZATION_LEVEL = 9;
-
-  private static final JavaScriptConfig ENABLED_INSTANCE = new JavaScriptConfig(true);
-
-  @JsonProperty
-  private final boolean enabled;
-
-  @JsonCreator
-  public JavaScriptConfig(
-      @JsonProperty("enabled") boolean enabled
-  )
-  {
-    this.enabled = enabled;
-  }
-
-  public boolean isEnabled()
-  {
-    return enabled;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    JavaScriptConfig that = (JavaScriptConfig) o;
-
-    return enabled == that.enabled;
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return (enabled ? 1 : 0);
-  }
-
-  @Override
-  public String toString()
-  {
-    return "JavaScriptConfig{" +
-           "enabled=" + enabled +
-           '}';
-  }
-
-  public static JavaScriptConfig getEnabledInstance()
-  {
-    return ENABLED_INSTANCE;
-  }
-}
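
A sketch of the intended usage: an extension that evaluates JavaScript should check isEnabled() before compiling anything. The compileScript helper below is hypothetical, for illustration only:

import com.google.common.base.Preconditions;

public class JavaScriptGuardDemo
{
  static String compileScript(String script, JavaScriptConfig config)
  {
    // Refuse to touch user-supplied JavaScript unless it has been explicitly enabled.
    Preconditions.checkState(config.isEnabled(), "JavaScript is disabled");
    return script; // a real extension would hand this to its script engine here
  }

  public static void main(String[] args)
  {
    compileScript("function(x) { return x }", JavaScriptConfig.getEnabledInstance()); // ok
    compileScript("function(x) { return x }", new JavaScriptConfig(false)); // IllegalStateException
  }
}
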
diff --git a/api/src/main/java/io/druid/query/SegmentDescriptor.java b/api/src/main/java/io/druid/query/SegmentDescriptor.java
deleted file mode 100644
index bda3543f4d3..00000000000
--- a/api/src/main/java/io/druid/query/SegmentDescriptor.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.query;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import org.joda.time.Interval;
-
-/**
- */
-public class SegmentDescriptor
-{
-  private final Interval interval;
-  private final String version;
-  private final int partitionNumber;
-
-  @JsonCreator
-  public SegmentDescriptor(
-      @JsonProperty("itvl") Interval interval,
-      @JsonProperty("ver") String version,
-      @JsonProperty("part") int partitionNumber
-  )
-  {
-    this.interval = interval;
-    this.version = version;
-    this.partitionNumber = partitionNumber;
-  }
-
-  @JsonProperty("itvl")
-  public Interval getInterval()
-  {
-    return interval;
-  }
-
-  @JsonProperty("ver")
-  public String getVersion()
-  {
-    return version;
-  }
-
-  @JsonProperty("part")
-  public int getPartitionNumber()
-  {
-    return partitionNumber;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    SegmentDescriptor that = (SegmentDescriptor) o;
-
-    if (partitionNumber != that.partitionNumber) {
-      return false;
-    }
-    if (interval != null ? !interval.equals(that.interval) : that.interval != null) {
-      return false;
-    }
-    if (version != null ? !version.equals(that.version) : that.version != null) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = interval != null ? interval.hashCode() : 0;
-    result = 31 * result + (version != null ? version.hashCode() : 0);
-    result = 31 * result + partitionNumber;
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "SegmentDescriptor{" +
-           "interval=" + interval +
-           ", version='" + version + '\'' +
-           ", partitionNumber=" + partitionNumber +
-           '}';
-  }
-}
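
Note the deliberately short JSON property names (itvl/ver/part), which keep per-segment query payloads small. A sketch of the wire format, assuming jackson-datatype-joda is on the classpath so the Joda Interval can be serialized (the exact interval rendering depends on the registered Joda serializers; Druid configures its own in its default ObjectMapper):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.joda.JodaModule;
import org.joda.time.Interval;

public class SegmentDescriptorJsonDemo
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper().registerModule(new JodaModule());
    SegmentDescriptor descriptor =
        new SegmentDescriptor(Interval.parse("2018-01-01/2018-01-02"), "v1", 0);
    // Roughly: {"itvl":"2018-01-01T00:00:00.000Z/2018-01-02T00:00:00.000Z","ver":"v1","part":0}
    System.out.println(mapper.writeValueAsString(descriptor));
  }
}
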
diff --git a/api/src/main/java/io/druid/segment/SegmentUtils.java b/api/src/main/java/io/druid/segment/SegmentUtils.java
deleted file mode 100644
index 76bdca204ec..00000000000
--- a/api/src/main/java/io/druid/segment/SegmentUtils.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment;
-
-import com.google.common.io.Files;
-import com.google.common.primitives.Ints;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.IOE;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * Utility methods useful for implementing deep storage extensions.
- */
-@PublicApi
-public class SegmentUtils
-{
-  public static int getVersionFromDir(File inDir) throws IOException
-  {
-    File versionFile = new File(inDir, "version.bin");
-    if (versionFile.exists()) {
-      return Ints.fromByteArray(Files.toByteArray(versionFile));
-    }
-
-    final File indexFile = new File(inDir, "index.drd");
-    int version;
-    if (indexFile.exists()) {
-      try (InputStream in = new FileInputStream(indexFile)) {
-        version = in.read();
-      }
-      return version;
-    }
-
-    throw new IOE("Invalid segment dir [%s]. Can't find either of version.bin or index.drd.", inDir);
-  }
-}
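
getVersionFromDir reads a 4-byte big-endian integer from version.bin when present, and otherwise falls back to the first byte of the legacy index.drd. A small sketch exercising the version.bin path (the directory layout is fabricated for illustration):

import com.google.common.io.Files;
import com.google.common.primitives.Ints;

import java.io.File;
import java.io.IOException;

public class SegmentVersionDemo
{
  public static void main(String[] args) throws IOException
  {
    File segmentDir = Files.createTempDir();
    // Write a 4-byte big-endian version marker, as modern segments do.
    Files.write(Ints.toByteArray(9), new File(segmentDir, "version.bin"));
    System.out.println(SegmentUtils.getVersionFromDir(segmentDir)); // 9
  }
}
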
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java b/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java
deleted file mode 100644
index 922595ad447..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.timeline.DataSegment;
-
-import javax.annotation.Nullable;
-
-@ExtensionPoint
-public interface DataSegmentArchiver
-{
-  /**
-   * Perform an archive task on the segment and return the resulting segment or null if there was no action needed.
-   *
-   * @param segment The source segment
-   *
-   * @return The segment after archiving or `null` if there was no archiving performed.
-   *
-   * @throws SegmentLoadingException on error
-   */
-  @Nullable
-  DataSegment archive(DataSegment segment) throws SegmentLoadingException;
-
-  /**
-   * Perform the restore from an archived segment and return the resulting segment, or null if no action was needed.
-   *
-   * @param segment The source (archived) segment
-   *
-   * @return The segment after it has been unarchived
-   *
-   * @throws SegmentLoadingException on error
-   */
-  @Nullable
-  DataSegment restore(DataSegment segment) throws SegmentLoadingException;
-}
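
A do-nothing implementation makes the null-means-no-action convention concrete; the class below is a hypothetical sketch, not part of the PR:

public class NoopDataSegmentArchiver implements DataSegmentArchiver
{
  @Override
  public DataSegment archive(DataSegment segment)
  {
    return null; // null signals that no archiving was performed
  }

  @Override
  public DataSegment restore(DataSegment segment)
  {
    return null; // nothing to unarchive either
  }
}
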
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java b/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java
deleted file mode 100644
index aa6cadf0031..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.Pair;
-import io.druid.java.util.common.logger.Logger;
-import io.druid.timeline.DataSegment;
-
-import java.util.Map;
-import java.util.Set;
-
-/**
- * A DataSegmentFinder is responsible for finding Druid segments underneath a specified directory and optionally
- * updating all descriptor.json files on deep storage with the correct loadSpec.
- */
-@ExtensionPoint
-public interface DataSegmentFinder
-{
-  Logger log = new Logger(DataSegmentFinder.class);
-
-  /**
-   * This method should first recursively look for descriptor.json (partitionNum_descriptor.json for HDFS data storage) underneath
-   * workingDirPath and then verify that index.zip (partitionNum_index.zip for HDFS data storage) exists in the same folder.
-   * If not, it should throw SegmentLoadingException to let the caller know that descriptor.json exists
-   * while index.zip doesn't. If a segment is found and updateDescriptor is set, then this method should update the
-   * loadSpec in descriptor.json to reflect the location from where it was found. After the search, this method
-   * should return the set of segments that were found.
-   *
-   * @param workingDirPath   the String representation of the working directory path
-   * @param updateDescriptor if true, update loadSpec in descriptor.json if loadSpec's location is different from where
-   *                         descriptor.json was found
-   *
-   * @return a set of segments that were found underneath workingDirPath
-   */
-  Set<DataSegment> findSegments(String workingDirPath, boolean updateDescriptor) throws SegmentLoadingException;
-
-  /**
-   * Adds dataSegment if it does not exist in timestampedSegments. If it exists, replaces entry if segmentModifiedAt is
-   * newer than stored timestamp.
-   *
-   * @param timestampedSegments map of <segmentID, Pair<segment, modifiedAt>> containing segments with modified time
-   * @param dataSegment         segment to add
-   * @param segmentModifiedAt   segment modified timestamp
-   */
-  static void putInMapRetainingNewest(
-      Map<String, Pair<DataSegment, Long>> timestampedSegments, DataSegment dataSegment, long segmentModifiedAt
-  )
-  {
-    timestampedSegments.merge(
-        dataSegment.getIdentifier(),
-        Pair.of(dataSegment, segmentModifiedAt),
-        (previous, current) -> {
-          log.warn("Multiple copies of segmentId [%s] found, using newest version", current.lhs.getIdentifier());
-          return previous.rhs > current.rhs ? previous : current;
-        }
-    );
-  }
-}
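
putInMapRetainingNewest is essentially a keep-newest Map.merge. The same semantics can be shown self-contained, with plain values standing in for DataSegment and Pair:

import java.util.HashMap;
import java.util.Map;

public class KeepNewestMergeDemo
{
  public static void main(String[] args)
  {
    Map<String, long[]> byId = new HashMap<>();
    merge(byId, "seg_1", 100L);
    merge(byId, "seg_1", 250L); // newer copy wins
    merge(byId, "seg_1", 50L);  // older copy is ignored
    System.out.println(byId.get("seg_1")[0]); // 250
  }

  static void merge(Map<String, long[]> map, String id, long modifiedAt)
  {
    // Keep whichever entry carries the larger modified-at timestamp.
    map.merge(id, new long[]{modifiedAt}, (prev, cur) -> prev[0] > cur[0] ? prev : cur);
  }
}
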
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java b/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java
deleted file mode 100644
index 8622c4ff917..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.logger.Logger;
-import io.druid.timeline.DataSegment;
-
-import java.io.IOException;
-
-@ExtensionPoint
-public interface DataSegmentKiller
-{
-  Logger log = new Logger(DataSegmentKiller.class);
-
-  /**
-   * Removes segment files (index and metadata) from deep storage.
-   * @param segment the segment to kill
-   * @throws SegmentLoadingException if the segment could not be completely removed
-   */
-  void kill(DataSegment segment) throws SegmentLoadingException;
-
-  /**
-   * A more stoic killer who doesn't throw a tantrum if things get messy. Use when killing segments for best-effort
-   * cleanup.
-   * @param segment the segment to kill
-   */
-  default void killQuietly(DataSegment segment)
-  {
-    try {
-      kill(segment);
-    }
-    catch (Exception e) {
-      log.debug(e, "Failed to kill segment %s", segment);
-    }
-  }
-
-  /**
-   * Like a nuke. Use wisely. Used by the 'reset-cluster' command, and of the built-in deep storage implementations, it
-   * is only implemented by local and HDFS.
-   */
-  void killAll() throws IOException;
-}
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java b/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java
deleted file mode 100644
index 2ee54334b8f..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.timeline.DataSegment;
-
-import java.util.Map;
-
-@ExtensionPoint
-public interface DataSegmentMover
-{
-  DataSegment move(DataSegment segment, Map<String, Object> targetLoadSpec) throws SegmentLoadingException;
-}
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java
deleted file mode 100644
index 5f3466dbc39..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import com.google.common.base.Joiner;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.StringUtils;
-import io.druid.timeline.DataSegment;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URI;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
-@ExtensionPoint
-public interface DataSegmentPusher
-{
-  Joiner JOINER = Joiner.on("/").skipNulls();
-
-  @Deprecated
-  String getPathForHadoop(String dataSource);
-  String getPathForHadoop();
-
-  /**
-   * Pushes index files and segment descriptor to deep storage.
-   * @param file directory containing index files
-   * @param segment segment descriptor
-   * @param useUniquePath if true, pushes to a unique file path. This prevents situations where task failures or replica
-   *                      tasks can either overwrite or fail to overwrite existing segments leading to the possibility
-   *                      of different versions of the same segment ID containing different data. As an example, a Kafka
-   *                      indexing task starting at offset A and ending at offset B may push a segment to deep storage
-   *                      and then fail before writing the loadSpec to the metadata table, resulting in a replacement
-   *                      task being spawned. This replacement will also start at offset A but will read to offset C and
-   *                      will then push a segment to deep storage and write the loadSpec metadata. Without unique file
-   *                      paths, this can only work correctly if new segments overwrite existing segments. Suppose that
-   *                      at this point the task then fails so that the supervisor retries again from offset A. This 3rd
-   *                      attempt will overwrite the segments in deep storage before failing to write the loadSpec
-   *                      metadata, resulting in inconsistencies in the segment data now in deep storage and copies of
-   *                      the segment already loaded by historicals.
-   *
-   *                      If unique paths are used, caller is responsible for cleaning up segments that were pushed but
-   *                      were not written to the metadata table (for example when using replica tasks).
-   * @return segment descriptor
-   * @throws IOException
-   */
-  DataSegment push(File file, DataSegment segment, boolean useUniquePath) throws IOException;
-
-  //use map instead of LoadSpec class to avoid dependency pollution.
-  Map<String, Object> makeLoadSpec(URI finalIndexZipFilePath);
-
-  /**
-   * @deprecated backward-compatibility shim that should be removed on the next major release;
-   * use {@link #getStorageDir(DataSegment, boolean)} instead.
-   */
-  @Deprecated
-  default String getStorageDir(DataSegment dataSegment)
-  {
-    return getStorageDir(dataSegment, false);
-  }
-
-  default String getStorageDir(DataSegment dataSegment, boolean useUniquePath)
-  {
-    return getDefaultStorageDir(dataSegment, useUniquePath);
-  }
-
-  default String makeIndexPathName(DataSegment dataSegment, String indexName)
-  {
-    // This is only called from Hadoop batch which doesn't require unique segment paths so set useUniquePath=false
-    return StringUtils.format("./%s/%s", getStorageDir(dataSegment, false), indexName);
-  }
-
-  /**
-   * Property prefixes that should be added to the "allowedHadoopPrefix" config for passing down to Hadoop jobs. These
-   * should be property prefixes like "druid.xxx", which means to include "druid.xxx" and "druid.xxx.*".
-   */
-  default List<String> getAllowedPropertyPrefixesForHadoop()
-  {
-    return Collections.emptyList();
-  }
-
-  // Note: storage directory structure format = .../dataSource/interval/version/partitionNumber/
-  // If above format is ever changed, make sure to change it appropriately in other places
-  // e.g. HDFSDataSegmentKiller uses this information to clean the version, interval and dataSource directories
-  // on segment deletion if segment being deleted was the only segment
-  static String getDefaultStorageDir(DataSegment segment, boolean useUniquePath)
-  {
-    return JOINER.join(
-        segment.getDataSource(),
-        StringUtils.format("%s_%s", segment.getInterval().getStart(), segment.getInterval().getEnd()),
-        segment.getVersion(),
-        segment.getShardSpec().getPartitionNum(),
-        useUniquePath ? generateUniquePath() : null
-    );
-  }
-
-  static String generateUniquePath()
-  {
-    return UUID.randomUUID().toString();
-  }
-}
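
getDefaultStorageDir relies on Joiner.skipNulls() to drop the trailing unique-path segment when useUniquePath is false. A standalone sketch of the resulting layout (all values fabricated):

import com.google.common.base.Joiner;

import java.util.UUID;

public class StorageDirDemo
{
  public static void main(String[] args)
  {
    Joiner joiner = Joiner.on("/").skipNulls();
    boolean useUniquePath = false;
    // Mirrors the format: dataSource/interval/version/partitionNumber[/uniquePath]
    String dir = joiner.join(
        "wikipedia",
        "2018-01-01T00:00:00.000Z_2018-01-02T00:00:00.000Z",
        "2018-08-30T00:00:00.000Z",
        0,
        useUniquePath ? UUID.randomUUID().toString() : null // skipNulls drops this segment
    );
    System.out.println(dir);
    // wikipedia/2018-01-01T00:00:00.000Z_2018-01-02T00:00:00.000Z/2018-08-30T00:00:00.000Z/0
  }
}
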
diff --git a/api/src/main/java/io/druid/segment/loading/LoadSpec.java b/api/src/main/java/io/druid/segment/loading/LoadSpec.java
deleted file mode 100644
index 23f5536bb69..00000000000
--- a/api/src/main/java/io/druid/segment/loading/LoadSpec.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.File;
-
-/**
- * A means of pulling segment files into a destination directory
- */
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-public interface LoadSpec
-{
-  /**
-   * Method should put the segment files in the directory passed
-   * @param destDir The destination directory
-   * @return The byte count of data put in the destination directory
-   */
-  LoadSpecResult loadSegment(File destDir) throws SegmentLoadingException;
-
-  // Hold interesting data about the results of the segment load
-  class LoadSpecResult
-  {
-    private final long size;
-
-    public LoadSpecResult(long size)
-    {
-      this.size = size;
-    }
-
-    public long getSize()
-    {
-      return this.size;
-    }
-  }
-}
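
A minimal LoadSpec might copy a local file into the destination directory and report its size. The LocalFileLoadSpec below is a hypothetical sketch; a real implementation would also be registered under the "type" property declared by @JsonTypeInfo:

import com.google.common.io.Files;

import java.io.File;
import java.io.IOException;

public class LocalFileLoadSpec implements LoadSpec
{
  private final File sourceFile;

  public LocalFileLoadSpec(File sourceFile)
  {
    this.sourceFile = sourceFile;
  }

  @Override
  public LoadSpecResult loadSegment(File destDir) throws SegmentLoadingException
  {
    try {
      File dest = new File(destDir, sourceFile.getName());
      Files.copy(sourceFile, dest);
      // Report how many bytes landed in the destination directory.
      return new LoadSpecResult(dest.length());
    }
    catch (IOException e) {
      throw new SegmentLoadingException(e, "Failed to pull [%s] into [%s]", sourceFile, destDir);
    }
  }
}
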
diff --git a/api/src/main/java/io/druid/segment/loading/NoopDataSegmentPusher.java b/api/src/main/java/io/druid/segment/loading/NoopDataSegmentPusher.java
deleted file mode 100644
index f7aa07f9615..00000000000
--- a/api/src/main/java/io/druid/segment/loading/NoopDataSegmentPusher.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import com.google.common.collect.ImmutableMap;
-import io.druid.timeline.DataSegment;
-
-import java.io.File;
-import java.net.URI;
-import java.util.Map;
-
-/**
- * Mostly used for test purposes.
- */
-public class NoopDataSegmentPusher implements DataSegmentPusher
-{
-  @Override
-  public String getPathForHadoop()
-  {
-    return "noop";
-  }
-
-  @Deprecated
-  @Override
-  public String getPathForHadoop(String dataSource)
-  {
-    return getPathForHadoop();
-  }
-
-  @Override
-  public DataSegment push(File file, DataSegment segment, boolean useUniquePath)
-  {
-    return segment;
-  }
-
-  @Override
-  public Map<String, Object> makeLoadSpec(URI uri)
-  {
-    return ImmutableMap.of();
-  }
-}
diff --git a/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java b/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java
deleted file mode 100644
index 26048c8736a..00000000000
--- a/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.StringUtils;
-
-/**
- */
-@PublicApi
-public class SegmentLoadingException extends Exception
-{
-  public SegmentLoadingException(
-      String formatString,
-      Object... objs
-  )
-  {
-    super(StringUtils.nonStrictFormat(formatString, objs));
-  }
-
-  public SegmentLoadingException(
-      Throwable cause,
-      String formatString,
-      Object... objs
-  )
-  {
-    super(StringUtils.nonStrictFormat(formatString, objs), cause);
-  }
-}
diff --git a/api/src/main/java/io/druid/segment/loading/URIDataPuller.java b/api/src/main/java/io/druid/segment/loading/URIDataPuller.java
deleted file mode 100644
index 5936bd8a2d9..00000000000
--- a/api/src/main/java/io/druid/segment/loading/URIDataPuller.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import com.google.common.base.Predicate;
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-
-/**
- * A URIDataPuller handles URI-based data.
- */
-@ExtensionPoint
-public interface URIDataPuller
-{
-  /**
-   * Create a new InputStream based on the URI
-   *
-   * @param uri The URI to open an Input Stream to
-   *
-   * @return A new InputStream which streams the URI in question
-   *
-   * @throws IOException
-   */
-  InputStream getInputStream(URI uri) throws IOException;
-
-  /**
-   * Returns an abstract "version" for the URI. The exact meaning of the version is left up to the implementation.
-   *
-   * @param uri The URI to check
-   *
-   * @return A "version" as interpreted by the URIDataPuller implementation
-   *
-   * @throws IOException on error
-   */
-  String getVersion(URI uri) throws IOException;
-
-  /**
-   * Evaluates a Throwable to see if it is recoverable. This is expected to be used in conjunction with the other
-   * methods to determine if anything thrown from those methods should be retried.
-   *
-   * @return Predicate function indicating if the Throwable is recoverable
-   */
-  Predicate<Throwable> shouldRetryPredicate();
-}
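
A typical shouldRetryPredicate treats transient I/O failures as retryable and everything else as fatal. A sketch, for illustration only (Guava's Predicate has a single abstract method, so a lambda works):

import com.google.common.base.Predicate;

import java.io.IOException;

public class RetryPredicateDemo
{
  public static void main(String[] args)
  {
    // Retry on I/O errors, which are usually transient; fail fast on anything else.
    Predicate<Throwable> shouldRetry = t -> t instanceof IOException;
    System.out.println(shouldRetry.apply(new IOException("connection reset")));     // true
    System.out.println(shouldRetry.apply(new IllegalStateException("bad config"))); // false
  }
}
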
diff --git a/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java b/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java
deleted file mode 100644
index 9b53adcd372..00000000000
--- a/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import com.google.common.base.Optional;
-import com.google.common.io.ByteSource;
-import io.druid.java.util.common.logger.Logger;
-
-import java.io.File;
-import java.io.IOException;
-
-public class NoopTaskLogs implements TaskLogs
-{
-  private final Logger log = new Logger(TaskLogs.class);
-
-  @Override
-  public Optional<ByteSource> streamTaskLog(String taskid, long offset)
-  {
-    return Optional.absent();
-  }
-
-  @Override
-  public void pushTaskLog(String taskid, File logFile)
-  {
-    log.info("Not pushing logs for task: %s", taskid);
-  }
-
-  @Override
-  public void pushTaskReports(String taskid, File reportFile) throws IOException
-  {
-    log.info("Not pushing reports for task: %s", taskid);
-  }
-
-  @Override
-  public void killAll()
-  {
-    log.info("Noop: No task logs are deleted.");
-  }
-
-  @Override
-  public void killOlderThan(long timestamp)
-  {
-    log.info("Noop: No task logs are deleted.");
-  }
-}
diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java b/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java
deleted file mode 100644
index fca5fe78d2c..00000000000
--- a/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.IOException;
-
-/**
- */
-@ExtensionPoint
-public interface TaskLogKiller
-{
-  void killAll() throws IOException;
-  void killOlderThan(long timestamp) throws IOException;
-}
diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java b/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java
deleted file mode 100644
index c50b0028dae..00000000000
--- a/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Something that knows how to persist local task logs to some form of long-term storage.
- */
-@ExtensionPoint
-public interface TaskLogPusher
-{
-  void pushTaskLog(String taskid, File logFile) throws IOException;
-
-  default void pushTaskReports(String taskid, File reportFile) throws IOException
-  {
-  }
-}
diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java b/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java
deleted file mode 100644
index 06410987dd4..00000000000
--- a/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import com.google.common.base.Optional;
-import com.google.common.io.ByteSource;
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.IOException;
-
-/**
- * Something that knows how to stream logs for tasks.
- */
-@ExtensionPoint
-public interface TaskLogStreamer
-{
-  /**
-   * Stream log for a task.
-   *
-   * @param offset If zero, stream the entire log. If positive, attempt to read from this position onwards. If
-   *               negative, attempt to read this many bytes from the end of the file (like <tt>tail -c</tt>).
-   *
-   * @return input supplier for this log, if available from this provider
-   */
-  Optional<ByteSource> streamTaskLog(String taskid, long offset) throws IOException;
-
-  default Optional<ByteSource> streamTaskReports(final String taskid) throws IOException
-  {
-    return Optional.absent();
-  }
-}
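
The offset convention above (zero = the whole log, positive = from that byte onwards, negative = a tail of that many bytes) maps naturally onto ByteSource.slice. The LocalTaskLogStreamer below is a hypothetical sketch over a local file:

import com.google.common.base.Optional;
import com.google.common.io.ByteSource;
import com.google.common.io.Files;

import java.io.File;

public class LocalTaskLogStreamer
{
  public Optional<ByteSource> streamTaskLog(File logFile, long offset)
  {
    if (!logFile.exists()) {
      return Optional.absent();
    }
    long length = logFile.length();
    // Positive offsets count from the start, negative from the end, zero means everything.
    long start = offset >= 0 ? Math.min(offset, length) : Math.max(length + offset, 0);
    return Optional.of(Files.asByteSource(logFile).slice(start, length - start));
  }
}
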
diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogs.java b/api/src/main/java/io/druid/tasklogs/TaskLogs.java
deleted file mode 100644
index 1852195d4b9..00000000000
--- a/api/src/main/java/io/druid/tasklogs/TaskLogs.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-@ExtensionPoint
-public interface TaskLogs extends TaskLogStreamer, TaskLogPusher, TaskLogKiller
-{
-}
diff --git a/api/src/main/java/io/druid/timeline/DataSegment.java b/api/src/main/java/io/druid/timeline/DataSegment.java
deleted file mode 100644
index 332e5cc7038..00000000000
--- a/api/src/main/java/io/druid/timeline/DataSegment.java
+++ /dev/null
@@ -1,494 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline;
-
-import com.fasterxml.jackson.annotation.JacksonInject;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.fasterxml.jackson.databind.annotation.JsonSerialize;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Interner;
-import com.google.common.collect.Interners;
-import com.google.inject.Inject;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.jackson.CommaListJoinDeserializer;
-import io.druid.jackson.CommaListJoinSerializer;
-import io.druid.java.util.common.granularity.Granularities;
-import io.druid.query.SegmentDescriptor;
-import io.druid.timeline.partition.NoneShardSpec;
-import io.druid.timeline.partition.ShardSpec;
-import it.unimi.dsi.fastutil.objects.Object2ObjectArrayMap;
-import org.joda.time.DateTime;
-import org.joda.time.Interval;
-
-import javax.annotation.Nullable;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-/**
- */
-@PublicApi
-public class DataSegment implements Comparable<DataSegment>
-{
-  public static String delimiter = "_";
-  private final Integer binaryVersion;
-  private static final Interner<String> STRING_INTERNER = Interners.newWeakInterner();
-  private static final Interner<List<String>> DIMENSIONS_INTERNER = Interners.newWeakInterner();
-  private static final Interner<List<String>> METRICS_INTERNER = Interners.newWeakInterner();
-  private static final Map<String, Object> PRUNED_LOAD_SPEC = ImmutableMap.of(
-      "load spec is pruned, because it's not needed on Brokers, but eats a lot of heap space",
-      ""
-  );
-
-  public static String makeDataSegmentIdentifier(
-      String dataSource,
-      DateTime start,
-      DateTime end,
-      String version,
-      ShardSpec shardSpec
-  )
-  {
-    StringBuilder sb = new StringBuilder();
-
-    sb.append(dataSource).append(delimiter)
-      .append(start).append(delimiter)
-      .append(end).append(delimiter)
-      .append(version);
-
-    if (shardSpec.getPartitionNum() != 0) {
-      sb.append(delimiter).append(shardSpec.getPartitionNum());
-    }
-
-    return sb.toString();
-  }
-
-  /**
-   * This class is needed for optional injection of pruneLoadSpec, see
-   * github.com/google/guice/wiki/FrequentlyAskedQuestions#how-can-i-inject-optional-parameters-into-a-constructor
-   */
-  @VisibleForTesting
-  public static class PruneLoadSpecHolder
-  {
-    @VisibleForTesting
-    public static final PruneLoadSpecHolder DEFAULT = new PruneLoadSpecHolder();
-
-    @Inject(optional = true) @PruneLoadSpec boolean pruneLoadSpec = false;
-  }
-
-  private final String dataSource;
-  private final Interval interval;
-  private final String version;
-  @Nullable
-  private final Map<String, Object> loadSpec;
-  private final List<String> dimensions;
-  private final List<String> metrics;
-  private final ShardSpec shardSpec;
-  private final long size;
-  private final String identifier;
-
-  public DataSegment(
-      String dataSource,
-      Interval interval,
-      String version,
-      Map<String, Object> loadSpec,
-      List<String> dimensions,
-      List<String> metrics,
-      ShardSpec shardSpec,
-      Integer binaryVersion,
-      long size
-  )
-  {
-    this(
-        dataSource,
-        interval,
-        version,
-        loadSpec,
-        dimensions,
-        metrics,
-        shardSpec,
-        binaryVersion,
-        size,
-        PruneLoadSpecHolder.DEFAULT
-    );
-  }
-
-  @JsonCreator
-  public DataSegment(
-      @JsonProperty("dataSource") String dataSource,
-      @JsonProperty("interval") Interval interval,
-      @JsonProperty("version") String version,
-      // use `Map` *NOT* `LoadSpec` because we want to do lazy materialization to prevent dependency pollution
-      @JsonProperty("loadSpec") @Nullable Map<String, Object> loadSpec,
-      @JsonProperty("dimensions")
-      @JsonDeserialize(using = CommaListJoinDeserializer.class)
-      @Nullable
-          List<String> dimensions,
-      @JsonProperty("metrics")
-      @JsonDeserialize(using = CommaListJoinDeserializer.class)
-      @Nullable
-          List<String> metrics,
-      @JsonProperty("shardSpec") @Nullable ShardSpec shardSpec,
-      @JsonProperty("binaryVersion") Integer binaryVersion,
-      @JsonProperty("size") long size,
-      @JacksonInject PruneLoadSpecHolder pruneLoadSpecHolder
-  )
-  {
-    // dataSource, dimensions & metrics are stored as canonical string values to decrease memory required for storing
-    // large numbers of segments.
-    this.dataSource = STRING_INTERNER.intern(dataSource);
-    this.interval = interval;
-    this.loadSpec = pruneLoadSpecHolder.pruneLoadSpec ? PRUNED_LOAD_SPEC : prepareLoadSpec(loadSpec);
-    this.version = version;
-    // Deduplicating dimensions and metrics lists as a whole because they are very likely the same for the same
-    // dataSource
-    this.dimensions = prepareDimensionsOrMetrics(dimensions, DIMENSIONS_INTERNER);
-    this.metrics = prepareDimensionsOrMetrics(metrics, METRICS_INTERNER);
-    this.shardSpec = (shardSpec == null) ? NoneShardSpec.instance() : shardSpec;
-    this.binaryVersion = binaryVersion;
-    this.size = size;
-
-    this.identifier = makeDataSegmentIdentifier(
-        this.dataSource,
-        this.interval.getStart(),
-        this.interval.getEnd(),
-        this.version,
-        this.shardSpec
-    );
-  }
-
-  @Nullable
-  private Map<String, Object> prepareLoadSpec(@Nullable Map<String, Object> loadSpec)
-  {
-    if (loadSpec == null) {
-      return null;
-    }
-    // Load spec is just of 3 entries on average; HashMap/LinkedHashMap consumes much more memory than ArrayMap
-    Map<String, Object> result = new Object2ObjectArrayMap<>(loadSpec.size());
-    for (Map.Entry<String, Object> e : loadSpec.entrySet()) {
-      result.put(STRING_INTERNER.intern(e.getKey()), e.getValue());
-    }
-    return result;
-  }
-
-  private List<String> prepareDimensionsOrMetrics(@Nullable List<String> list, Interner<List<String>> interner)
-  {
-    if (list == null) {
-      return ImmutableList.of();
-    } else {
-      List<String> result = list
-          .stream()
-          .filter(s -> !Strings.isNullOrEmpty(s))
-          .map(STRING_INTERNER::intern)
-          // TODO replace with ImmutableList.toImmutableList() when updated to Guava 21+
-          .collect(Collectors.collectingAndThen(Collectors.toList(), ImmutableList::copyOf));
-      return interner.intern(result);
-    }
-  }
-
-  /**
-   * Get dataSource
-   *
-   * @return the dataSource
-   */
-  @JsonProperty
-  public String getDataSource()
-  {
-    return dataSource;
-  }
-
-  @JsonProperty
-  public Interval getInterval()
-  {
-    return interval;
-  }
-
-  @Nullable
-  @JsonProperty
-  public Map<String, Object> getLoadSpec()
-  {
-    return loadSpec;
-  }
-
-  @JsonProperty
-  public String getVersion()
-  {
-    return version;
-  }
-
-  @JsonProperty
-  @JsonSerialize(using = CommaListJoinSerializer.class)
-  public List<String> getDimensions()
-  {
-    return dimensions;
-  }
-
-  @JsonProperty
-  @JsonSerialize(using = CommaListJoinSerializer.class)
-  public List<String> getMetrics()
-  {
-    return metrics;
-  }
-
-  @JsonProperty
-  public ShardSpec getShardSpec()
-  {
-    return shardSpec;
-  }
-
-  @JsonProperty
-  public Integer getBinaryVersion()
-  {
-    return binaryVersion;
-  }
-
-  @JsonProperty
-  public long getSize()
-  {
-    return size;
-  }
-
-  @JsonProperty
-  public String getIdentifier()
-  {
-    return identifier;
-  }
-
-  public SegmentDescriptor toDescriptor()
-  {
-    return new SegmentDescriptor(interval, version, shardSpec.getPartitionNum());
-  }
-
-  public DataSegment withLoadSpec(Map<String, Object> loadSpec)
-  {
-    return builder(this).loadSpec(loadSpec).build();
-  }
-
-  public DataSegment withDimensions(List<String> dimensions)
-  {
-    return builder(this).dimensions(dimensions).build();
-  }
-
-  public DataSegment withMetrics(List<String> metrics)
-  {
-    return builder(this).metrics(metrics).build();
-  }
-
-  public DataSegment withSize(long size)
-  {
-    return builder(this).size(size).build();
-  }
-
-  public DataSegment withVersion(String version)
-  {
-    return builder(this).version(version).build();
-  }
-
-  public DataSegment withBinaryVersion(int binaryVersion)
-  {
-    return builder(this).binaryVersion(binaryVersion).build();
-  }
-
-  @Override
-  public int compareTo(DataSegment dataSegment)
-  {
-    return getIdentifier().compareTo(dataSegment.getIdentifier());
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (o instanceof DataSegment) {
-      return getIdentifier().equals(((DataSegment) o).getIdentifier());
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return getIdentifier().hashCode();
-  }
-
-  @Override
-  public String toString()
-  {
-    return "DataSegment{" +
-           "size=" + size +
-           ", shardSpec=" + shardSpec +
-           ", metrics=" + metrics +
-           ", dimensions=" + dimensions +
-           ", version='" + version + '\'' +
-           ", loadSpec=" + loadSpec +
-           ", interval=" + interval +
-           ", dataSource='" + dataSource + '\'' +
-           ", binaryVersion='" + binaryVersion + '\'' +
-           '}';
-  }
-
-  public static Comparator<DataSegment> bucketMonthComparator()
-  {
-    return new Comparator<DataSegment>()
-    {
-      @Override
-      public int compare(DataSegment lhs, DataSegment rhs)
-      {
-        int retVal;
-
-        DateTime lhsMonth = Granularities.MONTH.bucketStart(lhs.getInterval().getStart());
-        DateTime rhsMonth = Granularities.MONTH.bucketStart(rhs.getInterval().getStart());
-
-        retVal = lhsMonth.compareTo(rhsMonth);
-
-        if (retVal != 0) {
-          return retVal;
-        }
-
-        return lhs.compareTo(rhs);
-      }
-    };
-  }
-
-  public static Builder builder()
-  {
-    return new Builder();
-  }
-
-  public static Builder builder(DataSegment segment)
-  {
-    return new Builder(segment);
-  }
-
-  public static class Builder
-  {
-    private String dataSource;
-    private Interval interval;
-    private String version;
-    private Map<String, Object> loadSpec;
-    private List<String> dimensions;
-    private List<String> metrics;
-    private ShardSpec shardSpec;
-    private Integer binaryVersion;
-    private long size;
-
-    public Builder()
-    {
-      this.loadSpec = ImmutableMap.of();
-      this.dimensions = ImmutableList.of();
-      this.metrics = ImmutableList.of();
-      this.shardSpec = NoneShardSpec.instance();
-      this.size = -1;
-    }
-
-    public Builder(DataSegment segment)
-    {
-      this.dataSource = segment.getDataSource();
-      this.interval = segment.getInterval();
-      this.version = segment.getVersion();
-      this.loadSpec = segment.getLoadSpec();
-      this.dimensions = segment.getDimensions();
-      this.metrics = segment.getMetrics();
-      this.shardSpec = segment.getShardSpec();
-      this.binaryVersion = segment.getBinaryVersion();
-      this.size = segment.getSize();
-    }
-
-    public Builder dataSource(String dataSource)
-    {
-      this.dataSource = dataSource;
-      return this;
-    }
-
-    public Builder interval(Interval interval)
-    {
-      this.interval = interval;
-      return this;
-    }
-
-    public Builder version(String version)
-    {
-      this.version = version;
-      return this;
-    }
-
-    public Builder loadSpec(Map<String, Object> loadSpec)
-    {
-      this.loadSpec = loadSpec;
-      return this;
-    }
-
-    public Builder dimensions(List<String> dimensions)
-    {
-      this.dimensions = dimensions;
-      return this;
-    }
-
-    public Builder metrics(List<String> metrics)
-    {
-      this.metrics = metrics;
-      return this;
-    }
-
-    public Builder shardSpec(ShardSpec shardSpec)
-    {
-      this.shardSpec = shardSpec;
-      return this;
-    }
-
-    public Builder binaryVersion(Integer binaryVersion)
-    {
-      this.binaryVersion = binaryVersion;
-      return this;
-    }
-
-    public Builder size(long size)
-    {
-      this.size = size;
-      return this;
-    }
-
-    public DataSegment build()
-    {
-      // Check stuff that goes into the identifier, at least.
-      Preconditions.checkNotNull(dataSource, "dataSource");
-      Preconditions.checkNotNull(interval, "interval");
-      Preconditions.checkNotNull(version, "version");
-      Preconditions.checkNotNull(shardSpec, "shardSpec");
-
-      return new DataSegment(
-          dataSource,
-          interval,
-          version,
-          loadSpec,
-          dimensions,
-          metrics,
-          shardSpec,
-          binaryVersion,
-          size
-      );
-    }
-  }
-}
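
Since this class is the main public handle on segments, a short usage sketch of the builder above
may help; all values are illustrative only:

    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.ImmutableMap;
    import org.joda.time.Interval;

    public class DataSegmentExample
    {
      public static void main(String[] args)
      {
        DataSegment segment = DataSegment.builder()
            .dataSource("wikipedia")
            .interval(Interval.parse("2000-01-01T00:00:00.000Z/2000-01-02T00:00:00.000Z"))
            .version("v1")
            .loadSpec(ImmutableMap.of("type", "local", "path", "/tmp/segment.zip"))
            .dimensions(ImmutableList.of("page", "language"))
            .metrics(ImmutableList.of("count"))
            .size(1024)
            .build();

        // The identifier joins dataSource, interval start/end, and version with '_',
        // appending the partition number only when it is non-zero, e.g.
        // wikipedia_2000-01-01T00:00:00.000Z_2000-01-02T00:00:00.000Z_v1
        System.out.println(segment.getIdentifier());
      }
    }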
diff --git a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java
deleted file mode 100644
index 04e5b46b71e..00000000000
--- a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline;
-
-import com.google.common.base.Function;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.DateTimes;
-import io.druid.java.util.common.IAE;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.logger.Logger;
-import org.joda.time.DateTime;
-import org.joda.time.Interval;
-
-import java.util.Objects;
-
-@PublicApi
-public class DataSegmentUtils
-{
-  private static final Logger LOGGER = new Logger(DataSegmentUtils.class);
-
-  public static Function<String, Interval> INTERVAL_EXTRACTOR(final String datasource)
-  {
-    return new Function<String, Interval>()
-    {
-      @Override
-      public Interval apply(String identifier)
-      {
-        SegmentIdentifierParts segmentIdentifierParts = valueOf(datasource, identifier);
-        if (segmentIdentifierParts == null) {
-          throw new IAE("Invalid identifier [%s]", identifier);
-        }
-
-        return segmentIdentifierParts.getInterval();
-      }
-    };
-  }
-
-  /**
-   * Parses a segment identifier into its components: dataSource, interval, version, and any trailing tags. Ignores
-   * shard spec.
-   *
-   * It is possible that this method may incorrectly parse an identifier, for example if the dataSource name in the
-   * identifier contains a DateTime-parseable string such as 'datasource_2000-01-01T00:00:00.000Z' and dataSource was
-   * provided as 'datasource'. The desired behavior in this case would be to return null, since the identifier does not
-   * actually belong to the provided dataSource, but a non-null result is returned instead. This is an edge case that would
-   * currently only affect paged select queries with a union dataSource of two similarly-named dataSources as in the
-   * given example.
-   *
-   * @param dataSource the dataSource corresponding to this identifier
-   * @param identifier segment identifier
-   * @return a {@link DataSegmentUtils.SegmentIdentifierParts} object if the identifier could be parsed, null otherwise
-   */
-  public static SegmentIdentifierParts valueOf(String dataSource, String identifier)
-  {
-    if (!identifier.startsWith(StringUtils.format("%s_", dataSource))) {
-      return null;
-    }
-
-    String remaining = identifier.substring(dataSource.length() + 1);
-    String[] splits = remaining.split(DataSegment.delimiter);
-    if (splits.length < 3) {
-      return null;
-    }
-
-    try {
-      DateTime start = DateTimes.ISO_DATE_TIME.parse(splits[0]);
-      DateTime end = DateTimes.ISO_DATE_TIME.parse(splits[1]);
-      String version = splits[2];
-      String trail = splits.length > 3 ? join(splits, DataSegment.delimiter, 3, splits.length) : null;
-
-      return new SegmentIdentifierParts(
-          dataSource,
-          new Interval(start, end),
-          version,
-          trail
-      );
-    }
-    catch (IllegalArgumentException e) {
-      return null;
-    }
-  }
-
-  public static String withInterval(final String dataSource, final String identifier, Interval newInterval)
-  {
-    SegmentIdentifierParts segmentDesc = DataSegmentUtils.valueOf(dataSource, identifier);
-    if (segmentDesc == null) {
-      // happens for test segments, which have invalid segment ids; ignore for now
-      LOGGER.warn("Invalid segment identifier " + identifier);
-      return identifier;
-    }
-    return segmentDesc.withInterval(newInterval).toString();
-  }
-
-  public static class SegmentIdentifierParts
-  {
-    private final String dataSource;
-    private final Interval interval;
-    private final String version;
-    private final String trail;
-
-    public SegmentIdentifierParts(String dataSource, Interval interval, String version, String trail)
-    {
-      this.dataSource = dataSource;
-      this.interval = interval;
-      this.version = version;
-      this.trail = trail;
-    }
-
-    @PublicApi
-    public String getDataSource()
-    {
-      return dataSource;
-    }
-
-    public Interval getInterval()
-    {
-      return interval;
-    }
-
-    @PublicApi
-    public String getVersion()
-    {
-      return version;
-    }
-
-    public SegmentIdentifierParts withInterval(Interval interval)
-    {
-      return new SegmentIdentifierParts(dataSource, interval, version, trail);
-    }
-
-    @Override
-    public boolean equals(Object o)
-    {
-      if (this == o) {
-        return true;
-      }
-      if (o == null || getClass() != o.getClass()) {
-        return false;
-      }
-
-      SegmentIdentifierParts that = (SegmentIdentifierParts) o;
-
-      if (!Objects.equals(dataSource, that.dataSource)) {
-        return false;
-      }
-      if (!Objects.equals(interval, that.interval)) {
-        return false;
-      }
-      if (!Objects.equals(version, that.version)) {
-        return false;
-      }
-      if (!Objects.equals(trail, that.trail)) {
-        return false;
-      }
-
-      return true;
-    }
-
-    @Override
-    public int hashCode()
-    {
-      return Objects.hash(dataSource, interval, version, trail);
-    }
-
-    @Override
-    public String toString()
-    {
-      return join(
-          new Object[]{dataSource, interval.getStart(), interval.getEnd(), version, trail},
-          DataSegment.delimiter, 0, version == null ? 3 : trail == null ? 4 : 5
-      );
-    }
-  }
-
-  private static String join(Object[] input, String delimiter, int start, int end)
-  {
-    StringBuilder builder = new StringBuilder();
-    for (int i = start; i < end; i++) {
-      if (i > start) {
-        builder.append(delimiter);
-      }
-      if (input[i] != null) {
-        builder.append(input[i]);
-      }
-    }
-    return builder.toString();
-  }
-}
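
The parsing behavior of valueOf() is easier to see with a concrete round trip; the identifier
below is illustrative and matches the format produced by DataSegment.makeDataSegmentIdentifier:

    import org.joda.time.Interval;

    public class SegmentIdExample
    {
      public static void main(String[] args)
      {
        final String id = "wikipedia_2000-01-01T00:00:00.000Z_2000-01-02T00:00:00.000Z_v1";

        // valueOf() returns null unless the identifier starts with "wikipedia_" and the
        // remainder splits into at least (start, end, version).
        DataSegmentUtils.SegmentIdentifierParts parts = DataSegmentUtils.valueOf("wikipedia", id);
        System.out.println(parts.getInterval()); // 2000-01-01T00:00:00.000Z/2000-01-02T00:00:00.000Z
        System.out.println(parts.getVersion());  // v1

        // withInterval() rewrites only the interval portion of the identifier.
        System.out.println(DataSegmentUtils.withInterval(
            "wikipedia", id, Interval.parse("2001-01-01T00:00:00.000Z/2001-01-02T00:00:00.000Z")
        ));
      }
    }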
diff --git a/api/src/main/java/io/druid/timeline/PruneLoadSpec.java b/api/src/main/java/io/druid/timeline/PruneLoadSpec.java
deleted file mode 100644
index 2be099b74ba..00000000000
--- a/api/src/main/java/io/druid/timeline/PruneLoadSpec.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * This annotation is used to inject a boolean parameter into a {@link DataSegment} constructor, which prescribes
- * dropping the deserialized "loadSpec" rather than storing it in a field of the {@link DataSegment}. It's very useful
- * on Brokers, because they store a lot of DataSegments in their heap, and the loadSpec takes a lot of space while not
- * being used on Brokers.
- */
-@Target({ElementType.PARAMETER, ElementType.FIELD})
-@Retention(RetentionPolicy.RUNTIME)
-@BindingAnnotation
-public @interface PruneLoadSpec
-{
-}
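
For readers unfamiliar with Guice optional injection, a Broker-side module could opt in to pruning
roughly like this; the module name is hypothetical, though bindConstant() with a binding annotation
is standard Guice:

    import com.google.inject.AbstractModule;

    public class BrokerSegmentModule extends AbstractModule
    {
      @Override
      protected void configure()
      {
        // Satisfies the @Inject(optional = true) @PruneLoadSpec boolean inside
        // DataSegment.PruneLoadSpecHolder; without any binding the field stays false.
        bindConstant().annotatedWith(PruneLoadSpec.class).to(true);
      }
    }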
diff --git a/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java b/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java
deleted file mode 100644
index c78f9b3cf43..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.RangeSet;
-import io.druid.data.input.InputRow;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-public class NoneShardSpec implements ShardSpec
-{
-  private static final NoneShardSpec INSTANCE = new NoneShardSpec();
-
-  @JsonCreator
-  public static NoneShardSpec instance()
-  {
-    return INSTANCE;
-  }
-
-  /**
-   * @deprecated use {@link #instance()} instead
-   */
-  @Deprecated
-  public NoneShardSpec()
-  {
-    // empty
-  }
-
-  @Override
-  public <T> PartitionChunk<T> createChunk(T obj)
-  {
-    return new SingleElementPartitionChunk<T>(obj);
-  }
-
-  @Override
-  public boolean isInChunk(long timestamp, InputRow inputRow)
-  {
-    return true;
-  }
-
-  @Override
-  @JsonIgnore
-  public int getPartitionNum()
-  {
-    return 0;
-  }
-
-  @Override
-  public ShardSpecLookup getLookup(final List<ShardSpec> shardSpecs)
-  {
-    return (long timestamp, InputRow row) -> shardSpecs.get(0);
-  }
-
-  @Override
-  public List<String> getDomainDimensions()
-  {
-    return ImmutableList.of();
-  }
-
-  @Override
-  public boolean possibleInDomain(Map<String, RangeSet<String>> domain)
-  {
-    return true;
-  }
-
-  @Override
-  public boolean equals(Object obj)
-  {
-    return obj instanceof NoneShardSpec;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return 0;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "NoneShardSpec";
-  }
-}
diff --git a/api/src/main/java/io/druid/timeline/partition/PartitionChunk.java b/api/src/main/java/io/druid/timeline/partition/PartitionChunk.java
deleted file mode 100644
index 656106ddbc6..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/PartitionChunk.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-/**
- * A PartitionChunk represents a chunk of a partitioned (sharded) space.  It has knowledge of whether it is
- * the start of the domain of partitions, the end of the domain, if it abuts another partition and where it stands
- * inside of a sorted collection of partitions.
- *
- * The ordering of PartitionChunks is based entirely upon the partition boundaries defined inside the concrete
- * PartitionChunk class.  That is, the payload (the object returned by getObject()) should *not* be involved in
- * comparisons between PartitionChunk objects.
- */
-public interface PartitionChunk<T> extends Comparable<PartitionChunk<T>>
-{
-  /**
-   * Returns the payload, generally an object that can be used to perform some action against the shard.
-   *
-   * @return the payload
-   */
-  T getObject();
-
-  /**
-   * Determines if this PartitionChunk abuts another PartitionChunk.  A sequence of abutting PartitionChunks should
-   * start with an object where isStart() == true and eventually end with an object where isEnd() == true.
-   *
-   * @param chunk input chunk
-   * @return true if this chunk abuts the input chunk
-   */
-  boolean abuts(PartitionChunk<T> chunk);
-
-  /**
-   * Returns true if this chunk is the beginning of the partition. Most commonly, that means it represents the range
-   * [-infinity, X) for some concrete X.
-   *
-   * @return true if the chunk is the beginning of the partition
-   */
-  boolean isStart();
-
-  /**
-   * Returns true if this chunk is the end of the partition.  Most commonly, that means it represents the range
-   * [X, infinity] for some concrete X.
-   *
-   * @return true if the chunk is the end of the partition
-   */
-  boolean isEnd();
-
-  /**
-   * Returns the partition chunk number of this PartitionChunk.  I.e. if there are 4 partitions in total and this
-   * is the 3rd partition, it will return 2
-   *
-   * @return the sequential numerical id of this partition chunk
-   */
-  int getChunkNumber();
-}
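
The ordering contract above exists so callers can verify that a sorted run of chunks covers the
whole partition space. A sketch of such a check (a hypothetical helper, not Druid API):

    import java.util.List;

    public class PartitionChunks
    {
      // A sorted run of chunks covers the partition space iff it starts with isStart(),
      // every neighbor abuts the next, and it ends with isEnd().
      public static <T> boolean isComplete(List<PartitionChunk<T>> sortedChunks)
      {
        if (sortedChunks.isEmpty() || !sortedChunks.get(0).isStart()) {
          return false;
        }
        for (int i = 1; i < sortedChunks.size(); i++) {
          if (!sortedChunks.get(i - 1).abuts(sortedChunks.get(i))) {
            return false;
          }
        }
        return sortedChunks.get(sortedChunks.size() - 1).isEnd();
      }
    }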
diff --git a/api/src/main/java/io/druid/timeline/partition/ShardSpec.java b/api/src/main/java/io/druid/timeline/partition/ShardSpec.java
deleted file mode 100644
index 4849478a47e..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/ShardSpec.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.google.common.collect.RangeSet;
-import io.druid.data.input.InputRow;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * A Marker interface that exists to combine ShardSpec objects together for Jackson. Note that this is not an
- * extension API. Extensions are not expected to create new kinds of ShardSpecs.
- */
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-@JsonSubTypes({
-                  @JsonSubTypes.Type(name = "none", value = NoneShardSpec.class),
-              })
-public interface ShardSpec
-{
-  <T> PartitionChunk<T> createChunk(T obj);
-
-  boolean isInChunk(long timestamp, InputRow inputRow);
-
-  int getPartitionNum();
-
-  ShardSpecLookup getLookup(List<ShardSpec> shardSpecs);
-
-  /**
-   * Get the dimensions that have a known possible value range for the rows this shard contains.
-   *
-   * @return list of dimensions with a known possible range; dimensions whose possible range is unknown are not listed
-   */
-  List<String> getDomainDimensions();
-
-  /**
-   * Returns false if the given domain ranges are not possible in this shard; otherwise returns true.
-   *
-   * @return whether rows within the given domain may exist in this shard
-   */
-  boolean possibleInDomain(Map<String, RangeSet<String>> domain);
-}
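
The domain methods support filter-based shard pruning. A small sketch of the caller side, using
NoneShardSpec from this PR, which reports no domain dimensions and therefore can never rule itself
out; the domain values are illustrative:

    import com.google.common.collect.ImmutableMap;
    import com.google.common.collect.Range;
    import com.google.common.collect.RangeSet;
    import com.google.common.collect.TreeRangeSet;
    import java.util.Map;

    public class ShardPruningExample
    {
      public static void main(String[] args)
      {
        // A filter-derived domain: countryName must fall within ["France", "Germany"].
        RangeSet<String> countries = TreeRangeSet.create();
        countries.add(Range.closed("France", "Germany"));
        Map<String, RangeSet<String>> domain = ImmutableMap.of("countryName", countries);

        ShardSpec shard = NoneShardSpec.instance();
        System.out.println(shard.getDomainDimensions());    // []
        System.out.println(shard.possibleInDomain(domain)); // true
      }
    }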
diff --git a/api/src/main/java/io/druid/timeline/partition/ShardSpecLookup.java b/api/src/main/java/io/druid/timeline/partition/ShardSpecLookup.java
deleted file mode 100644
index bf2ce2f47b0..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/ShardSpecLookup.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-import io.druid.data.input.InputRow;
-
-public interface ShardSpecLookup
-{
-  ShardSpec getShardSpec(long timestamp, InputRow row);
-}
diff --git a/api/src/main/java/io/druid/timeline/partition/SingleElementPartitionChunk.java b/api/src/main/java/io/druid/timeline/partition/SingleElementPartitionChunk.java
deleted file mode 100644
index 7d15f0fe3e5..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/SingleElementPartitionChunk.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-/**
- */
-public class SingleElementPartitionChunk<T> implements PartitionChunk<T>
-{
-  private final T element;
-
-  public SingleElementPartitionChunk(T element)
-  {
-    this.element = element;
-  }
-
-  @Override
-  public T getObject()
-  {
-    return element;
-  }
-
-  @Override
-  public boolean abuts(PartitionChunk<T> tPartitionChunk)
-  {
-    return false;
-  }
-
-  @Override
-  public boolean isStart()
-  {
-    return true;
-  }
-
-  @Override
-  public boolean isEnd()
-  {
-    return true;
-  }
-
-  @Override
-  public int getChunkNumber()
-  {
-    return 0;
-  }
-
-  /**
-   * The ordering of PartitionChunks is determined entirely by the partition boundaries and has nothing to do
-   * with the object.  Thus, if there are two SingleElementPartitionChunks, they are equal because they both
-   * represent the full partition space.
-   *
-   * SingleElementPartitionChunks are currently defined as less than every other type of PartitionChunk.  There
-   * is no good reason for it, nor is there a bad reason, that's just the way it is.  This is subject to change.
-   *
-   * @param chunk the chunk to compare against
-   * @return 0 if the given chunk is also a SingleElementPartitionChunk, -1 otherwise
-   */
-  @Override
-  public int compareTo(PartitionChunk<T> chunk)
-  {
-    return chunk instanceof SingleElementPartitionChunk ? 0 : -1;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return element != null ? element.hashCode() : 0;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "SingleElementPartitionChunk{" +
-           "element=" + element +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/utils/CircularBuffer.java b/api/src/main/java/io/druid/utils/CircularBuffer.java
deleted file mode 100644
index 7f2f96b2f69..00000000000
--- a/api/src/main/java/io/druid/utils/CircularBuffer.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.utils;
-
-import com.google.common.base.Preconditions;
-
-/**
- * A circular buffer that supports random bidirectional access.
- *
- * @param <E> Type of object to be stored in the buffer
- */
-public class CircularBuffer<E>
-{
-  public E[] getBuffer()
-  {
-    return buffer;
-  }
-
-  private final E[] buffer;
-
-  private int start = 0;
-  private int size = 0;
-
-  public CircularBuffer(int capacity)
-  {
-    Preconditions.checkArgument(capacity > 0, "Capacity must be greater than 0.");
-    buffer = (E[]) new Object[capacity];
-  }
-
-  public void add(E item)
-  {
-    buffer[start++] = item;
-
-    if (start >= buffer.length) {
-      start = 0;
-    }
-
-    if (size < buffer.length) {
-      size++;
-    }
-  }
-
-  /**
-   * Access object at a given index, starting from the latest entry added and moving backwards.
-   */
-  public E getLatest(int index)
-  {
-    Preconditions.checkArgument(index >= 0 && index < size, "invalid index");
-
-    int bufferIndex = start - index - 1;
-    if (bufferIndex < 0) {
-      bufferIndex = buffer.length + bufferIndex;
-    }
-    return buffer[bufferIndex];
-  }
-
-  /**
-   * Access object at a given index, starting from the earliest entry added and moving forward.
-   */
-  public E get(int index)
-  {
-    Preconditions.checkArgument(index >= 0 && index < size, "invalid index");
-
-    int bufferIndex = (start - size + index) % buffer.length;
-    if (bufferIndex < 0) {
-      bufferIndex += buffer.length;
-    }
-    return buffer[bufferIndex];
-  }
-
-  public int size()
-  {
-    return size;
-  }
-}
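
A short usage sketch showing the bidirectional access this buffer offers (values illustrative):

    public class CircularBufferExample
    {
      public static void main(String[] args)
      {
        CircularBuffer<String> buffer = new CircularBuffer<>(3);
        buffer.add("a");
        buffer.add("b");
        buffer.add("c");
        buffer.add("d"); // overwrites "a"; size stays at the capacity of 3

        System.out.println(buffer.get(0));       // "b" -- earliest surviving entry
        System.out.println(buffer.getLatest(0)); // "d" -- most recent entry
        System.out.println(buffer.size());       // 3
      }
    }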
diff --git a/api/src/main/java/io/druid/utils/CompressionUtils.java b/api/src/main/java/io/druid/utils/CompressionUtils.java
deleted file mode 100644
index bbe7aff4048..00000000000
--- a/api/src/main/java/io/druid/utils/CompressionUtils.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.utils;
-
-
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.logger.Logger;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- */
-@PublicApi
-public class CompressionUtils
-{
-  private static final Logger log = new Logger(CompressionUtils.class);
-
-
-  @Deprecated // Use io.druid.java.util.common.CompressionUtils.zip
-  public static long zip(File directory, File outputZipFile) throws IOException
-  {
-    return io.druid.java.util.common.CompressionUtils.zip(directory, outputZipFile);
-  }
-
-
-  @Deprecated // Use io.druid.java.util.common.CompressionUtils.zip
-  public static long zip(File directory, OutputStream out) throws IOException
-  {
-    return io.druid.java.util.common.CompressionUtils.zip(directory, out);
-  }
-
-  @Deprecated // Use io.druid.java.util.common.CompressionUtils.unzip
-  public static void unzip(File pulledFile, File outDir) throws IOException
-  {
-    io.druid.java.util.common.CompressionUtils.unzip(pulledFile, outDir);
-  }
-
-  @Deprecated // Use io.druid.java.util.common.CompressionUtils.unzip
-  public static void unzip(InputStream in, File outDir) throws IOException
-  {
-    io.druid.java.util.common.CompressionUtils.unzip(in, outDir);
-  }
-
-  /**
-   * Decompresses the gzipped `pulledFile` into the `outDir`.
-   * Unlike `io.druid.java.util.common.CompressionUtils.gunzip`, this function takes an output *DIRECTORY* and tries to guess the file name.
-   * It is recommended that callers use `io.druid.java.util.common.CompressionUtils.gunzip` and specify the output file themselves to ensure names are as expected.
-   *
-   * @param pulledFile The source file
-   * @param outDir     The destination directory to put the resulting file
-   *
-   * @throws IOException on propagated IO exception, IAE if it cannot determine the proper new name for `pulledFile`
-   */
-  @Deprecated // See description for alternative
-  public static void gunzip(File pulledFile, File outDir)
-  {
-    final File outFile = new File(outDir, io.druid.java.util.common.CompressionUtils.getGzBaseName(pulledFile.getName()));
-    io.druid.java.util.common.CompressionUtils.gunzip(pulledFile, outFile);
-    if (!pulledFile.delete()) {
-      log.error("Could not delete tmpFile[%s].", pulledFile);
-    }
-  }
-
-}
diff --git a/api/src/main/java/io/druid/utils/Runnables.java b/api/src/main/java/io/druid/utils/Runnables.java
deleted file mode 100644
index 30ccf37b56d..00000000000
--- a/api/src/main/java/io/druid/utils/Runnables.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.utils;
-
-import io.druid.guice.annotations.PublicApi;
-
-/**
- */
-@PublicApi
-public class Runnables
-{
-  public static Runnable getNoopRunnable()
-  {
-    return () -> {};
-  }
-}
diff --git a/api/src/main/java/org/apache/druid/cli/CliCommandCreator.java b/api/src/main/java/org/apache/druid/cli/CliCommandCreator.java
new file mode 100644
index 00000000000..dc314a7eab9
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/cli/CliCommandCreator.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.cli;
+
+import io.airlift.airline.Cli;
+import org.apache.druid.guice.annotations.ExtensionPoint;
+
+/**
+ */
+@ExtensionPoint
+public interface CliCommandCreator
+{
+  void addCommands(Cli.CliBuilder builder);
+}
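
An extension would typically register its commands through this hook roughly as follows; the
command class and names are hypothetical, and this assumes airline's CliBuilder.withCommand API:

    import io.airlift.airline.Cli;
    import io.airlift.airline.Command;

    public class MyToolCommandCreator implements CliCommandCreator
    {
      @Override
      public void addCommands(Cli.CliBuilder builder)
      {
        builder.withCommand(MyToolCommand.class);
      }

      @Command(name = "my-tool", description = "An illustrative extension command")
      public static class MyToolCommand implements Runnable
      {
        @Override
        public void run()
        {
          System.out.println("hello from an extension command");
        }
      }
    }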
diff --git a/api/src/main/java/org/apache/druid/data/input/ByteBufferInputRowParser.java b/api/src/main/java/org/apache/druid/data/input/ByteBufferInputRowParser.java
new file mode 100644
index 00000000000..10c4633f88f
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/ByteBufferInputRowParser.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import org.apache.druid.data.input.impl.InputRowParser;
+import org.apache.druid.data.input.impl.ParseSpec;
+
+import java.nio.ByteBuffer;
+
+public interface ByteBufferInputRowParser extends InputRowParser<ByteBuffer>
+{
+  @Override
+  ByteBufferInputRowParser withParseSpec(ParseSpec parseSpec);
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/Committer.java b/api/src/main/java/org/apache/druid/data/input/Committer.java
new file mode 100644
index 00000000000..0f8e31bd714
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/Committer.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import org.apache.druid.guice.annotations.ExtensionPoint;
+
+/**
+ * Committer includes a Runnable and a Jackson-serialized metadata object containing the offset.
+ */
+@ExtensionPoint
+public interface Committer extends Runnable
+{
+  /**
+   * @return A representation of the commit metadata, which must be serializable and deserializable by Jackson.
+   * Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types
+   */
+  Object getMetadata();
+}
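
A minimal sketch of an implementation carrying a simple offset map as metadata; the class and the
source of the offset are hypothetical:

    import com.google.common.collect.ImmutableMap;

    public class OffsetCommitter implements Committer
    {
      private final long offset;

      public OffsetCommitter(long offset)
      {
        this.offset = offset;
      }

      @Override
      public Object getMetadata()
      {
        // Kept to Map/primitive types so Jackson can round-trip it without custom serializers.
        return ImmutableMap.of("offset", offset);
      }

      @Override
      public void run()
      {
        // Invoked once the batch read up to `offset` has been durably persisted; a real
        // implementation might acknowledge or trim an upstream queue here.
      }
    }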
diff --git a/api/src/main/java/org/apache/druid/data/input/FiniteFirehoseFactory.java b/api/src/main/java/org/apache/druid/data/input/FiniteFirehoseFactory.java
new file mode 100644
index 00000000000..42a16837b97
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/FiniteFirehoseFactory.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import org.apache.druid.data.input.impl.InputRowParser;
+
+import java.io.IOException;
+import java.util.stream.Stream;
+
+/**
+ * A {@link FiniteFirehoseFactory} is designed for batch processing. Its implementations assume that the number of inputs is
+ * limited.
+ *
+ * @param <T> parser type
+ * @param <S> input split type
+ */
+public interface FiniteFirehoseFactory<T extends InputRowParser, S> extends FirehoseFactory<T>
+{
+  /**
+   * Returns true if this {@link FiniteFirehoseFactory} supports parallel batch indexing.
+   */
+  @JsonIgnore
+  @Override
+  default boolean isSplittable()
+  {
+    return true;
+  }
+
+  /**
+   * Returns a {@link Stream} for {@link InputSplit}s. In parallel batch indexing, each {@link InputSplit} is processed
+   * by a sub task.
+   *
+   * Listing splits may cause high overhead in some implementations. In this case, {@link InputSplit}s should be listed
+   * lazily so that the listing overhead can be amortized.
+   */
+  @JsonIgnore
+  Stream<InputSplit<S>> getSplits() throws IOException;
+
+  /**
+   * Returns number of splits returned by {@link #getSplits()}.
+   */
+  @JsonIgnore
+  int getNumSplits() throws IOException;
+
+  /**
+   * Returns the same {@link FiniteFirehoseFactory} but with the given {@link InputSplit}. The returned
+   * {@link FiniteFirehoseFactory} is used by sub tasks in parallel batch indexing.
+   */
+  FiniteFirehoseFactory<T, S> withSplit(InputSplit<S> split);
+}
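
To illustrate how getSplits() and withSplit() fit together, a supervisor-side fan-out might look
like the following sketch (a hypothetical helper; each sub task receives a copy of the factory
narrowed down to a single split):

    import java.io.IOException;
    import java.util.List;
    import java.util.stream.Collectors;
    import org.apache.druid.data.input.impl.InputRowParser;

    public class SplitFanOut
    {
      public static <T extends InputRowParser, S> List<FiniteFirehoseFactory<T, S>> fanOut(
          FiniteFirehoseFactory<T, S> factory
      ) throws IOException
      {
        return factory.getSplits()
                      .map(factory::withSplit)
                      .collect(Collectors.toList());
      }
    }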
diff --git a/api/src/main/java/org/apache/druid/data/input/Firehose.java b/api/src/main/java/org/apache/druid/data/input/Firehose.java
new file mode 100644
index 00000000000..7886cdc81f3
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/Firehose.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import org.apache.druid.guice.annotations.ExtensionPoint;
+
+import javax.annotation.Nullable;
+import java.io.Closeable;
+
+/**
+ * This is an interface that holds onto the stream of incoming data.  Realtime data ingestion is built around this
+ * abstraction.  In order to add a new type of source for realtime data ingestion, all you need to do is implement
+ * one of these and register it with the Main.
+ *
+ * This object acts a lot like an Iterator, but it doesn't extend the Iterator interface because it extends
+ * Closeable and it is very important that the close() method doesn't get forgotten, which is easy to do if this
+ * gets passed around as an Iterator.
+ * <p>
+ * The implementation of this interface only needs to be minimally thread-safe. The three methods ##hasMore(),
+ * ##nextRow() and ##commit() are all called from the same thread.  ##commit(), however, returns a callback
+ * which will be called on another thread, so the operations inside of that callback must be thread-safe.
+ * </p>
+ */
+@ExtensionPoint
+public interface Firehose extends Closeable
+{
+  /**
+   * Returns whether there are more rows to process.  This is used to indicate that another item is immediately
+   * available via ##nextRow().  Thus, if the stream is still available but there are no new messages on it, this call
+   * should block until a new message is available.
+   *
+   * If something happens such that the stream is no longer available, this should return false.
+   *
+   * @return true if and when there is another row available, false if the stream has dried up
+   */
+  boolean hasMore();
+
+  /**
+   * The next row available.  Should only be called if hasMore returns true.
+   * The return value can be null, which means the caller must skip this row.
+   *
+   * @return The next row
+   */
+  @Nullable
+  InputRow nextRow();
+
+  /**
+   * Returns a runnable that will "commit" everything read up to the point at which commit() is called.  This is
+   * often equivalent to everything that has been read since the last commit() call (or instantiation of the object),
+   * but doesn't necessarily have to be.
+   *
+   * This method is called when the main processing loop starts to persist its current batch of things to process.
+   * The returned runnable will be run when the current batch has been successfully persisted; there is usually
+   * some time lag between when this method is called and when the runnable is run.  The Runnable is also run on
+   * a separate thread so its operation should be thread-safe.
+   *
+   * The Runnable is essentially just a lambda/closure that is run() after data supplied by this instance has
+   * been committed on the writer side of this interface protocol.
+   * <p>
+   * A simple implementation of this interface might do nothing when run() is called 
+   * (in which case the same do-nothing instance can be returned every time), or 
+   * a more complex implementation might clean up temporary resources that are no longer needed 
+   * because of InputRows delivered by prior calls to ##nextRow().
+   * </p>
+   */
+  Runnable commit();
+}
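
The thread-safety notes above imply a single-threaded consumption loop shaped roughly like this
sketch; persist() is a hypothetical placeholder for durably writing a batch:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.druid.data.input.Firehose;
    import org.apache.druid.data.input.InputRow;

    public class FirehoseLoop
    {
      public static void drain(Firehose firehose, int batchSize) throws IOException
      {
        try {
          List<InputRow> batch = new ArrayList<>();
          while (firehose.hasMore()) {
            InputRow row = firehose.nextRow();
            if (row == null) {
              continue; // a null row means "skip this one"
            }
            batch.add(row);
            if (batch.size() >= batchSize) {
              Runnable commitRunnable = firehose.commit(); // covers everything read so far
              persist(batch);       // hypothetical: durably persist the batch
              commitRunnable.run(); // run only after the persist succeeds
              batch.clear();
            }
          }
          if (!batch.isEmpty()) {
            Runnable commitRunnable = firehose.commit();
            persist(batch);
            commitRunnable.run();
          }
        }
        finally {
          firehose.close(); // close() is easy to forget since Firehose is not an Iterator
        }
      }

      private static void persist(List<InputRow> batch)
      {
        // placeholder for the sketch; a real caller would persist rows here
      }
    }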
diff --git a/api/src/main/java/org/apache/druid/data/input/FirehoseFactory.java b/api/src/main/java/org/apache/druid/data/input/FirehoseFactory.java
new file mode 100644
index 00000000000..64cb3687cae
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/FirehoseFactory.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import org.apache.druid.data.input.impl.InputRowParser;
+import org.apache.druid.data.input.impl.prefetch.PrefetchableTextFilesFirehoseFactory;
+import org.apache.druid.guice.annotations.ExtensionPoint;
+import org.apache.druid.java.util.common.parsers.ParseException;
+
+import javax.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * FirehoseFactory creates a {@link Firehose} which is an interface holding onto the stream of incoming data.
+ * It currently provides two methods for creating a {@link Firehose} and their default implementations call each other
+ * for backward compatibility.  Implementations of this interface must override at least one of these methods.
+ */
+@ExtensionPoint
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
+public interface FirehoseFactory<T extends InputRowParser>
+{
+  /**
+   * Initialization method that connects up the fire hose.  If this method returns successfully it should be safe to
+   * call hasMore() on the returned Firehose (which might subsequently block).
+   * <p/>
+   * If this method returns null, then any attempt to call hasMore(), nextRow(), commit() and close() on the return
+   * value will throw a surprising NPE.   Throwing IOException on connection failure or runtime exception on
+   * invalid configuration is preferred over returning null.
+   *
+   * @param parser             an input row parser
+   */
+  @Deprecated
+  default Firehose connect(T parser) throws IOException, ParseException
+  {
+    return connect(parser, null);
+  }
+
+  /**
+   * Initialization method that connects up the fire hose.  If this method returns successfully it should be safe to
+   * call hasMore() on the returned Firehose (which might subsequently block).
+   * <p/>
+   * If this method returns null, then any attempt to call hasMore(), nextRow(), commit() and close() on the return
+   * value will throw a surprising NPE.   Throwing IOException on connection failure or runtime exception on
+   * invalid configuration is preferred over returning null.
+   * <p/>
+   * Some fire hoses like {@link PrefetchableTextFilesFirehoseFactory} may use a temporary
+   * directory to cache data in it.
+   *
+   * @param parser             an input row parser
+   * @param temporaryDirectory a directory where temporary files are stored
+   */
+  default Firehose connect(T parser, @Nullable File temporaryDirectory) throws IOException, ParseException
+  {
+    return connect(parser);
+  }
+
+  default boolean isSplittable()
+  {
+    return false;
+  }
+}
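
As a sketch, a factory can override only the two-argument connect(), since the two defaults delegate to each other and would otherwise recurse. The class below is hypothetical and reuses the IteratorFirehose sketch from the Firehose section above:

    import org.apache.druid.data.input.Firehose;
    import org.apache.druid.data.input.FirehoseFactory;
    import org.apache.druid.data.input.impl.InputRowParser;

    import javax.annotation.Nullable;
    import java.io.File;
    import java.io.IOException;
    import java.util.Collections;

    // Hypothetical sketch: overrides the two-argument connect(); the deprecated
    // one-argument default then delegates here with a null temporaryDirectory.
    public class InMemoryFirehoseFactory implements FirehoseFactory<InputRowParser>
    {
      @Override
      public Firehose connect(InputRowParser parser, @Nullable File temporaryDirectory) throws IOException
      {
        // On connection failure, throw IOException rather than returning null,
        // as the javadoc above prefers.
        return new IteratorFirehose(Collections.emptyIterator());
      }
    }
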
diff --git a/api/src/main/java/org/apache/druid/data/input/FirehoseFactoryV2.java b/api/src/main/java/org/apache/druid/data/input/FirehoseFactoryV2.java
new file mode 100644
index 00000000000..08259b4f69c
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/FirehoseFactoryV2.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import org.apache.druid.data.input.impl.InputRowParser;
+import org.apache.druid.guice.annotations.ExtensionPoint;
+import org.apache.druid.java.util.common.parsers.ParseException;
+
+import java.io.IOException;
+
+/**
+ * FirehoseFactoryV2 creates a {@link FirehoseV2}.  Its connect() method is the initialization point: if it returns
+ * successfully, it should be safe to call start() on the returned FirehoseV2 (which might subsequently block).
+ *
+ * In contrast to the V1 {@link FirehoseFactory}, FirehoseFactoryV2 is able to pass an additional JSON-serialized
+ * object to FirehoseV2, which contains the last commit metadata.
+ *
+ * <p/>
+ * If connect() returns null, then any attempt to call start(), advance(), currRow(), makeCommitter() and close() on
+ * the return value will throw a surprising NPE.  Throwing IOException on connection failure or runtime exception on
+ * invalid configuration is preferred over returning null.
+ */
+@ExtensionPoint
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
+public interface FirehoseFactoryV2<T extends InputRowParser>
+{
+  /**
+   * This method is declared to throw {@link IOException}, although it's not thrown in the implementations in Druid
+   * code, for compatibility with third-party extensions.
+   */
+  @SuppressWarnings("RedundantThrows")
+  FirehoseV2 connect(T parser, Object lastCommit) throws IOException, ParseException;
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/FirehoseV2.java b/api/src/main/java/org/apache/druid/data/input/FirehoseV2.java
new file mode 100644
index 00000000000..6c2ddae5ef0
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/FirehoseV2.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import org.apache.druid.guice.annotations.ExtensionPoint;
+
+import java.io.Closeable;
+
+/**
+ * This is an interface that holds onto the stream of incoming data.  Realtime data ingestion is built around this
+ * abstraction.  In order to add a new type of source for realtime data ingestion, all you need to do is implement
+ * one of these and register it with the Main.
+ *
+ * In contrast to the V1 Firehose, FirehoseV2 always operates in a "peek, then advance" manner.
+ * The intended usage pattern is:
+ * 1. Call start()
+ * 2. Read currRow()
+ * 3. Call advance()
+ * 4. If the index should be committed: makeCommitter()
+ * 5. GOTO 2
+ *
+ * Note that makeCommitter() is called *after* advance().
+ *
+ * This object acts a lot like an Iterator, but it doesn't extend the Iterator interface because it extends
+ * Closeable and it is very important that the close() method doesn't get forgotten, which is easy to do if this
+ * gets passed around as an Iterator.
+ *
+ * The implementation of this interface only needs to be minimally thread-safe.  The methods {@link #start()},
+ * {@link #advance()}, {@link #currRow()} and {@link #makeCommitter()} are all called from the same thread.
+ * {@link #makeCommitter()}, however, returns a callback which will be called on another thread, so the operations
+ * inside of that callback must be thread-safe.
+ */
+@ExtensionPoint
+public interface FirehoseV2 extends Closeable
+{
+  /**
+   * Performs the initial start; must be called once before currRow() or advance().
+   */
+  void start();
+
+  /**
+   * Advance the firehose to the next offset.  Implementations of this interface should make sure that
+   * if advance() throws an exception, the next call to currRow() returns a
+   * null value.
+   *
+   * @return true if another row is available, false if the stream has dried up
+   */
+  boolean advance();
+
+  /**
+   * @return The current row
+   */
+  InputRow currRow();
+
+  /**
+   * Returns a Committer that will "commit" everything read up to the point at which makeCommitter() is called.
+   *
+   * This method is called when the main processing loop starts to persist its current batch of things to process.
+   * The returned committer will be run when the current batch has been successfully persisted,
+   * and the metadata the committer carries can also be persisted along with the segment data.  There is usually
+   * some time lag between when this method is called and when the committer is run.  The committer is also run on
+   * a separate thread, so its operation should be thread-safe.
+   *
+   * Note that "correct" usage of this interface will always call advance() before makeCommitter() if the current
+   * row is to be included in the commit.
+   *
+   * The Committer is essentially just a lambda/closure that is run() after data supplied by this instance has
+   * been committed on the writer side of this interface protocol.
+   *
+   * A simple implementation might do nothing when run() is called, and simply carry the proper commit
+   * information in its metadata.
+   */
+  Committer makeCommitter();
+}
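
The numbered usage pattern above, written out as a loop. This is a sketch: the process/persist helpers are hypothetical stand-ins for the real ingestion machinery, and it assumes Committer extends Runnable (hence committer.run()), as it does elsewhere in this API:

    import org.apache.druid.data.input.Committer;
    import org.apache.druid.data.input.FirehoseV2;
    import org.apache.druid.data.input.InputRow;

    // Hypothetical sketch of the "peek, then advance" loop documented above.
    class FirehoseV2Loop
    {
      void run(FirehoseV2 firehose) throws Exception
      {
        firehose.start();                                    // 1. initial start
        boolean hasMore = true;
        while (hasMore) {
          InputRow row = firehose.currRow();                 // 2. peek at the current row
          if (row != null) {
            process(row);
          }
          hasMore = firehose.advance();                      // 3. advance past the row just read
          if (shouldPersist()) {
            Committer committer = firehose.makeCommitter();  // 4. commit *after* advance
            persist(committer);                              // committer.run() once the batch is durable
          }
        }
        firehose.close();
      }

      private void process(InputRow row) { /* hand the row to the indexer */ }
      private boolean shouldPersist() { return false; }
      private void persist(Committer committer) { committer.run(); }
    }
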
diff --git a/api/src/main/java/org/apache/druid/data/input/InputRow.java b/api/src/main/java/org/apache/druid/data/input/InputRow.java
new file mode 100644
index 00000000000..bab19286be8
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/InputRow.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import org.apache.druid.guice.annotations.ExtensionPoint;
+
+import java.util.List;
+
+/**
+ * An InputRow is the interface definition of an event being input into the data ingestion layer.
+ *
+ * An InputRow is a Row with a self-describing list of the dimensions available.  This list is used to
+ * implement "schema-less" data ingestion that allows the system to add new dimensions as they appear.
+ *
+ */
+@ExtensionPoint
+public interface InputRow extends Row
+{
+  /**
+   * Returns the dimensions that exist in this row.
+   *
+   * @return the dimensions that exist in this row.
+   */
+  List<String> getDimensions();
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/InputSplit.java b/api/src/main/java/org/apache/druid/data/input/InputSplit.java
new file mode 100644
index 00000000000..7ceb2b0410e
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/InputSplit.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * Input unit for distributed batch ingestion. Used in {@link FiniteFirehoseFactory}.
+ * An {@link InputSplit} represents the input data processed by a {@code org.apache.druid.indexing.common.task.Task}.
+ */
+public class InputSplit<T>
+{
+  private final T split;
+
+  @JsonCreator
+  public InputSplit(@JsonProperty("split") T split)
+  {
+    this.split = split;
+  }
+
+  @JsonProperty("split")
+  public T get()
+  {
+    return split;
+  }
+
+  @Override
+  public String toString()
+  {
+    return "InputSplit{" +
+           "split=" + split +
+           "}";
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/MapBasedInputRow.java b/api/src/main/java/org/apache/druid/data/input/MapBasedInputRow.java
new file mode 100644
index 00000000000..59ab8a55710
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/MapBasedInputRow.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import org.apache.druid.guice.annotations.PublicApi;
+import org.apache.druid.java.util.common.DateTimes;
+import org.joda.time.DateTime;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ */
+@PublicApi
+public class MapBasedInputRow extends MapBasedRow implements InputRow
+{
+  private final List<String> dimensions;
+
+  public MapBasedInputRow(
+      long timestamp,
+      List<String> dimensions,
+      Map<String, Object> event
+  )
+  {
+    super(timestamp, event);
+    this.dimensions = dimensions;
+  }
+
+  public MapBasedInputRow(
+      DateTime timestamp,
+      List<String> dimensions,
+      Map<String, Object> event
+  )
+  {
+    super(timestamp, event);
+    this.dimensions = dimensions;
+  }
+
+  @Override
+  public List<String> getDimensions()
+  {
+    return dimensions;
+  }
+
+  @Override
+  public String toString()
+  {
+    return "MapBasedInputRow{" +
+           "timestamp=" + DateTimes.utc(getTimestampFromEpoch()) +
+           ", event=" + getEvent() +
+           ", dimensions=" + dimensions +
+           '}';
+  }
+}
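
Constructing one of these rows directly shows the schema-less idea: the dimension list travels with the event map rather than with a fixed schema. A small sketch (class name and values are arbitrary):

    import com.google.common.collect.ImmutableMap;
    import org.apache.druid.data.input.InputRow;
    import org.apache.druid.data.input.MapBasedInputRow;

    import java.util.Arrays;

    class MapBasedInputRowExample
    {
      public static void main(String[] args)
      {
        InputRow row = new MapBasedInputRow(
            1535648400000L,                    // epoch millis; the DateTime constructor also works
            Arrays.asList("page", "country"),  // self-describing dimension list
            ImmutableMap.<String, Object>of("page", "Main_Page", "country", "US", "added", 17)
        );
        System.out.println(row.getDimensions());  // [page, country] -- "added" stays a metric
      }
    }
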
diff --git a/api/src/main/java/org/apache/druid/data/input/MapBasedRow.java b/api/src/main/java/org/apache/druid/data/input/MapBasedRow.java
new file mode 100644
index 00000000000..72b90f22f21
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/MapBasedRow.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.druid.guice.annotations.PublicApi;
+import org.apache.druid.java.util.common.DateTimes;
+import org.joda.time.DateTime;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ */
+@PublicApi
+public class MapBasedRow implements Row
+{
+  private final DateTime timestamp;
+  private final Map<String, Object> event;
+
+  @JsonCreator
+  public MapBasedRow(
+      @JsonProperty("timestamp") DateTime timestamp,
+      @JsonProperty("event") Map<String, Object> event
+  )
+  {
+    this.timestamp = timestamp;
+    this.event = event;
+  }
+
+  public MapBasedRow(
+      long timestamp,
+      Map<String, Object> event
+  )
+  {
+    this(DateTimes.utc(timestamp), event);
+  }
+
+  @Override
+  public long getTimestampFromEpoch()
+  {
+    return timestamp.getMillis();
+  }
+
+  @Override
+  @JsonProperty
+  public DateTime getTimestamp()
+  {
+    return timestamp;
+  }
+
+  @JsonProperty
+  public Map<String, Object> getEvent()
+  {
+    return event;
+  }
+
+  @Override
+  public List<String> getDimension(String dimension)
+  {
+    return Rows.objectToStrings(event.get(dimension));
+  }
+
+  @Override
+  public Object getRaw(String dimension)
+  {
+    return event.get(dimension);
+  }
+
+  @Override
+  public Number getMetric(String metric)
+  {
+    return Rows.objectToNumber(metric, event.get(metric));
+  }
+
+  @Override
+  public String toString()
+  {
+    return "MapBasedRow{" +
+           "timestamp=" + timestamp +
+           ", event=" + event +
+           '}';
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    MapBasedRow that = (MapBasedRow) o;
+
+    if (!event.equals(that.event)) {
+      return false;
+    }
+    if (!timestamp.equals(that.timestamp)) {
+      return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode()
+  {
+    int result = timestamp.hashCode();
+    result = 31 * result + event.hashCode();
+    return result;
+  }
+
+  @Override
+  public int compareTo(Row o)
+  {
+    return timestamp.compareTo(o.getTimestamp());
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/Row.java b/api/src/main/java/org/apache/druid/data/input/Row.java
new file mode 100644
index 00000000000..c757e0821d9
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/Row.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import org.apache.druid.guice.annotations.PublicApi;
+import org.joda.time.DateTime;
+
+import java.util.List;
+
+/**
+ * A Row of data.  This can be used both as input to and as output from various parts of the system.  It assumes
+ * that the user already knows the schema of the row and can query for the parts that they care about.
+ */
+@PublicApi
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "version", defaultImpl = MapBasedRow.class)
+@JsonSubTypes(value = {
+    @JsonSubTypes.Type(name = "v1", value = MapBasedRow.class)
+})
+public interface Row extends Comparable<Row>
+{
+  /**
+   * Returns the timestamp from the epoch in milliseconds.  If the event happened _right now_, this would return the
+   * same thing as System.currentTimeMillis();
+   *
+   * @return the timestamp from the epoch in milliseconds.
+   */
+  long getTimestampFromEpoch();
+
+  /**
+   * Returns the timestamp as an org.joda.time.DateTime.  If the event happened _right now_, this would return the
+   * same thing as new DateTime();
+   *
+   * @return the timestamp as an org.joda.time.DateTime object.
+   */
+  DateTime getTimestamp();
+
+  /**
+   * Returns the list of dimension values for the given column name.
+   * <p/>
+   *
+   * @param dimension the column name of the dimension requested
+   *
+   * @return the list of values for the provided column name
+   */
+  List<String> getDimension(String dimension);
+
+  /**
+   * Returns the raw dimension value for the given column name. This is different from {@link #getDimension} which
+   * converts all values to strings before returning them.
+   *
+   * @param dimension the column name of the dimension requested
+   *
+   * @return the value of the provided column name
+   */
+  Object getRaw(String dimension);
+
+  /**
+   * Returns the metric column value for the given column name. This method is different from {@link #getRaw} in two
+   * aspects:
+   *  1. If the column is absent in the row, numeric zero is returned, rather than null.
+   *  2. If the column has string value, an attempt is made to parse this value as a number.
+   */
+  Number getMetric(String metric);
+}
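
The difference between the three accessors can be seen with MapBasedRow from this same package. A sketch (values arbitrary; the zero-for-absent behavior of getMetric() holds in the default null-handling mode):

    import com.google.common.collect.ImmutableMap;
    import org.apache.druid.data.input.MapBasedRow;
    import org.apache.druid.data.input.Row;

    import java.util.Arrays;

    class RowAccessorsExample
    {
      public static void main(String[] args)
      {
        Row row = new MapBasedRow(
            0L,
            ImmutableMap.<String, Object>of("count", "1,234", "tags", Arrays.asList("a", "b"))
        );
        System.out.println(row.getDimension("tags"));  // [a, b] -- values coerced to strings
        System.out.println(row.getRaw("tags"));        // the original List, untouched
        System.out.println(row.getMetric("count"));    // 1234 -- string parsed, commas stripped by Rows
      }
    }
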
diff --git a/api/src/main/java/org/apache/druid/data/input/Rows.java b/api/src/main/java/org/apache/druid/data/input/Rows.java
new file mode 100644
index 00000000000..a1aedfff75c
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/Rows.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSortedSet;
+import com.google.common.collect.Maps;
+import com.google.common.primitives.Longs;
+import org.apache.druid.common.config.NullHandling;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.parsers.ParseException;
+
+import javax.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ */
+public class Rows
+{
+  public static final Long LONG_ZERO = 0L;
+
+  /**
+   * @param timeStamp rolled-up timestamp to be used to create the group key
+   * @param inputRow  input row
+   *
+   * @return groupKey for the given input row
+   */
+  public static List<Object> toGroupKey(long timeStamp, InputRow inputRow)
+  {
+    final Map<String, Set<String>> dims = Maps.newTreeMap();
+    for (final String dim : inputRow.getDimensions()) {
+      final Set<String> dimValues = ImmutableSortedSet.copyOf(inputRow.getDimension(dim));
+      if (dimValues.size() > 0) {
+        dims.put(dim, dimValues);
+      }
+    }
+    return ImmutableList.of(
+        timeStamp,
+        dims
+    );
+  }
+
+  /**
+   * Convert an object to a list of strings.
+   */
+  public static List<String> objectToStrings(final Object inputValue)
+  {
+    if (inputValue == null) {
+      return Collections.emptyList();
+    } else if (inputValue instanceof List) {
+      // guava's toString function fails on null objects, so please do not use it
+      final List<Object> values = (List) inputValue;
+
+      final List<String> retVal = new ArrayList<>(values.size());
+      for (Object val : values) {
+        retVal.add(String.valueOf(val));
+      }
+
+      return retVal;
+    } else {
+      return Collections.singletonList(String.valueOf(inputValue));
+    }
+  }
+
+  /**
+   * Convert an object to a number.  Null inputs are converted via {@link NullHandling#defaultLongValue()}
+   * (zero in the default null-handling mode, null in SQL-compatible mode).
+   *
+   * @param name       field name of the object being converted (may be used for exception messages)
+   * @param inputValue the actual object being converted
+   *
+   * @return a number, possibly null
+   *
+   * @throws ParseException       if the column cannot be converted to a number
+   */
+  @Nullable
+  public static Number objectToNumber(final String name, final Object inputValue)
+  {
+    if (inputValue == null) {
+      return NullHandling.defaultLongValue();
+    }
+
+    if (inputValue instanceof Number) {
+      return (Number) inputValue;
+    } else if (inputValue instanceof String) {
+      try {
+        String metricValueString = StringUtils.removeChar(((String) inputValue).trim(), ',');
+        // Longs.tryParse() doesn't support leading '+', so we need to trim it ourselves
+        metricValueString = trimLeadingPlusOfLongString(metricValueString);
+        Long v = Longs.tryParse(metricValueString);
+        // Do NOT use a ternary operator here, because it would make Java convert the Long to a Double
+        if (v != null) {
+          return v;
+        } else {
+          return Double.valueOf(metricValueString);
+        }
+      }
+      catch (Exception e) {
+        throw new ParseException(e, "Unable to parse value[%s] for field[%s]", inputValue, name);
+      }
+    } else {
+      throw new ParseException("Unknown type[%s] for field[%s]", inputValue.getClass(), name);
+    }
+  }
+
+  private static String trimLeadingPlusOfLongString(String metricValueString)
+  {
+    if (metricValueString.length() > 1 && metricValueString.charAt(0) == '+') {
+      char secondChar = metricValueString.charAt(1);
+      if (secondChar >= '0' && secondChar <= '9') {
+        metricValueString = metricValueString.substring(1);
+      }
+    }
+    return metricValueString;
+  }
+}
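
The string-to-number path above accepts a few lenient formats. As a sketch of the behavior ("m" is just a hypothetical field name):

    import org.apache.druid.data.input.Rows;

    class ObjectToNumberExample
    {
      public static void main(String[] args)
      {
        System.out.println(Rows.objectToNumber("m", "+1,234"));  // 1234: comma removed, leading '+' trimmed, parsed as Long
        System.out.println(Rows.objectToNumber("m", "3.5"));     // 3.5: not a long, falls through to Double.valueOf
        System.out.println(Rows.objectToNumber("m", 7));         // 7: Numbers pass through unchanged
        // Rows.objectToNumber("m", "abc") would throw ParseException
      }
    }
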
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java b/api/src/main/java/org/apache/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java
new file mode 100644
index 00000000000..c40ff2feec5
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.data.input.FiniteFirehoseFactory;
+import org.apache.druid.data.input.Firehose;
+import org.apache.druid.data.input.InputSplit;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.LineIterator;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+import java.util.stream.Stream;
+
+/**
+ * An abstract firehose factory for making firehoses that read text files.
+ * It provides a unified {@link #connect(StringInputRowParser, File)} implementation for its subclasses.
+ *
+ * @param <T> object type representing input data
+ */
+public abstract class AbstractTextFilesFirehoseFactory<T>
+    implements FiniteFirehoseFactory<StringInputRowParser, T>
+{
+  private static final Logger LOG = new Logger(AbstractTextFilesFirehoseFactory.class);
+
+  private List<T> objects;
+
+  @Override
+  public Firehose connect(StringInputRowParser firehoseParser, File temporaryDirectory) throws IOException
+  {
+    initializeObjectsIfNeeded();
+    final Iterator<T> iterator = objects.iterator();
+    return new FileIteratingFirehose(
+        new Iterator<LineIterator>()
+        {
+          @Override
+          public boolean hasNext()
+          {
+            return iterator.hasNext();
+          }
+
+          @Override
+          public LineIterator next()
+          {
+            if (!hasNext()) {
+              throw new NoSuchElementException();
+            }
+            final T object = iterator.next();
+            try {
+              return IOUtils.lineIterator(wrapObjectStream(object, openObjectStream(object)), StandardCharsets.UTF_8);
+            }
+            catch (Exception e) {
+              LOG.error(
+                  e,
+                  "Exception reading object[%s]",
+                  object
+              );
+              throw Throwables.propagate(e);
+            }
+          }
+        },
+        firehoseParser
+    );
+  }
+
+  protected void initializeObjectsIfNeeded() throws IOException
+  {
+    if (objects == null) {
+      objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "initObjects"));
+    }
+  }
+
+  public List<T> getObjects()
+  {
+    return objects;
+  }
+
+  @Override
+  public Stream<InputSplit<T>> getSplits() throws IOException
+  {
+    initializeObjectsIfNeeded();
+    return getObjects().stream().map(InputSplit::new);
+  }
+
+  @Override
+  public int getNumSplits() throws IOException
+  {
+    initializeObjectsIfNeeded();
+    return getObjects().size();
+  }
+
+  /**
+   * Initialize objects to be read by this firehose.  Since firehose factories are constructed whenever
+   * org.apache.druid.indexing.common.task.Task objects are deserialized, actual initialization of objects is deferred
+   * until {@link #connect(StringInputRowParser, File)} is called.
+   *
+   * @return a collection of initialized objects.
+   */
+  protected abstract Collection<T> initObjects() throws IOException;
+
+  /**
+   * Open an input stream from the given object.  If the object is compressed, this method should return the byte
+   * stream still compressed.  Decompression should be handled in {@link #wrapObjectStream(Object, InputStream)}.
+   *
+   * @param object an object to be read
+   *
+   * @return an input stream for the object
+   */
+  protected abstract InputStream openObjectStream(T object) throws IOException;
+
+  /**
+   * Wrap the given input stream if needed.  The decompression logic should be applied to the given stream if the object
+   * is compressed.
+   *
+   * @param object an input object
+   * @param stream a stream for the object
+   * @return a wrapped input stream
+   */
+  protected abstract InputStream wrapObjectStream(T object, InputStream stream) throws IOException;
+}
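
As a sketch of a concrete subclass, the hypothetical class below reads possibly-gzipped local files; only the three abstract hooks need implementing, and connect() is inherited. It assumes FiniteFirehoseFactory (not shown in this hunk) requires no further abstract members; in the real interface, an additional override such as withSplit() may also be needed:

    import com.google.common.collect.ImmutableList;
    import org.apache.druid.data.input.impl.AbstractTextFilesFirehoseFactory;

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Collection;
    import java.util.zip.GZIPInputStream;

    // Hypothetical factory over local text files; gzip is unwrapped in wrapObjectStream().
    public class LocalTextFirehoseFactory extends AbstractTextFilesFirehoseFactory<File>
    {
      private final File baseDir;

      public LocalTextFirehoseFactory(File baseDir)
      {
        this.baseDir = baseDir;
      }

      @Override
      protected Collection<File> initObjects()
      {
        // Deferred until connect()/getSplits(), per the javadoc above.
        File[] files = baseDir.listFiles();
        return files == null ? ImmutableList.of() : ImmutableList.copyOf(files);
      }

      @Override
      protected InputStream openObjectStream(File object) throws IOException
      {
        return new FileInputStream(object);  // raw bytes, still compressed if the file is gzipped
      }

      @Override
      protected InputStream wrapObjectStream(File object, InputStream stream) throws IOException
      {
        return object.getName().endsWith(".gz") ? new GZIPInputStream(stream) : stream;
      }
    }
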
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/CSVParseSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/CSVParseSpec.java
new file mode 100644
index 00000000000..61eca2e091e
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/CSVParseSpec.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
+import org.apache.druid.java.util.common.parsers.CSVParser;
+import org.apache.druid.java.util.common.parsers.Parser;
+
+import java.util.List;
+
+/**
+ */
+public class CSVParseSpec extends ParseSpec
+{
+  private final String listDelimiter;
+  private final List<String> columns;
+  private final boolean hasHeaderRow;
+  private final int skipHeaderRows;
+
+  @JsonCreator
+  public CSVParseSpec(
+      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
+      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
+      @JsonProperty("listDelimiter") String listDelimiter,
+      @JsonProperty("columns") List<String> columns,
+      @JsonProperty("hasHeaderRow") boolean hasHeaderRow,
+      @JsonProperty("skipHeaderRows") int skipHeaderRows
+  )
+  {
+    super(timestampSpec, dimensionsSpec);
+
+    this.listDelimiter = listDelimiter;
+    this.columns = columns;
+    this.hasHeaderRow = hasHeaderRow;
+    this.skipHeaderRows = skipHeaderRows;
+
+    if (columns != null) {
+      for (String column : columns) {
+        Preconditions.checkArgument(!column.contains(","), "Column[%s] must not contain a comma", column);
+      }
+      verify(dimensionsSpec.getDimensionNames());
+    } else {
+      Preconditions.checkArgument(
+          hasHeaderRow,
+          "If columns field is not set, the first row of your data must have your header"
+          + " and hasHeaderRow must be set to true."
+      );
+    }
+  }
+
+  @Deprecated
+  public CSVParseSpec(
+      TimestampSpec timestampSpec,
+      DimensionsSpec dimensionsSpec,
+      String listDelimiter,
+      List<String> columns
+  )
+  {
+    this(timestampSpec, dimensionsSpec, listDelimiter, columns, false, 0);
+  }
+
+  @JsonProperty
+  public String getListDelimiter()
+  {
+    return listDelimiter;
+  }
+
+  @JsonProperty("columns")
+  public List<String> getColumns()
+  {
+    return columns;
+  }
+
+  @JsonProperty
+  public boolean isHasHeaderRow()
+  {
+    return hasHeaderRow;
+  }
+
+  @JsonProperty("skipHeaderRows")
+  public int getSkipHeaderRows()
+  {
+    return skipHeaderRows;
+  }
+
+  @Override
+  public void verify(List<String> usedCols)
+  {
+    for (String columnName : usedCols) {
+      Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
+    }
+  }
+
+  @Override
+  public Parser<String, Object> makeParser()
+  {
+    return new CSVParser(listDelimiter, columns, hasHeaderRow, skipHeaderRows);
+  }
+
+  @Override
+  public ParseSpec withTimestampSpec(TimestampSpec spec)
+  {
+    return new CSVParseSpec(spec, getDimensionsSpec(), listDelimiter, columns, hasHeaderRow, skipHeaderRows);
+  }
+
+  @Override
+  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
+  {
+    return new CSVParseSpec(getTimestampSpec(), spec, listDelimiter, columns, hasHeaderRow, skipHeaderRows);
+  }
+
+}
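
The constructor enforces the two legal configurations: explicit columns, or a header row in the data. A sketch of both (the TimestampSpec arguments assume its usual (column, format, missingValue) constructor, which is not part of this hunk):

    import org.apache.druid.data.input.impl.CSVParseSpec;
    import org.apache.druid.data.input.impl.DimensionsSpec;
    import org.apache.druid.data.input.impl.TimestampSpec;

    import java.util.Arrays;

    class CsvParseSpecExample
    {
      static final CSVParseSpec EXPLICIT_COLUMNS = new CSVParseSpec(
          new TimestampSpec("ts", "auto", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("page", "country"))),
          null,                                    // listDelimiter: parser default
          Arrays.asList("ts", "page", "country"),  // every dimension must appear here, or verify() throws
          false,                                   // hasHeaderRow
          0                                        // skipHeaderRows
      );

      static final CSVParseSpec COLUMNS_FROM_HEADER = new CSVParseSpec(
          new TimestampSpec("ts", "auto", null),
          DimensionsSpec.EMPTY,
          null,
          null,  // no columns given ...
          true,  // ... so hasHeaderRow must be true, else the constructor throws
          0
      );
    }
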
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/DelimitedParseSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/DelimitedParseSpec.java
new file mode 100644
index 00000000000..f88b1326002
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/DelimitedParseSpec.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
+import org.apache.druid.java.util.common.parsers.DelimitedParser;
+import org.apache.druid.java.util.common.parsers.Parser;
+
+import java.util.List;
+
+/**
+ */
+public class DelimitedParseSpec extends ParseSpec
+{
+  private final String delimiter;
+  private final String listDelimiter;
+  private final List<String> columns;
+  private final boolean hasHeaderRow;
+  private final int skipHeaderRows;
+
+  @JsonCreator
+  public DelimitedParseSpec(
+      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
+      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
+      @JsonProperty("delimiter") String delimiter,
+      @JsonProperty("listDelimiter") String listDelimiter,
+      @JsonProperty("columns") List<String> columns,
+      @JsonProperty("hasHeaderRow") boolean hasHeaderRow,
+      @JsonProperty("skipHeaderRows") int skipHeaderRows
+  )
+  {
+    super(timestampSpec, dimensionsSpec);
+
+    this.delimiter = delimiter;
+    this.listDelimiter = listDelimiter;
+    this.columns = columns;
+    this.hasHeaderRow = hasHeaderRow;
+    this.skipHeaderRows = skipHeaderRows;
+
+    if (columns != null) {
+      for (String column : this.columns) {
+        Preconditions.checkArgument(!column.contains(","), "Column[%s] must not contain a comma", column);
+      }
+      verify(dimensionsSpec.getDimensionNames());
+    } else {
+      Preconditions.checkArgument(
+          hasHeaderRow,
+          "If columns field is not set, the first row of your data must have your header"
+          + " and hasHeaderRow must be set to true."
+      );
+    }
+  }
+
+  @Deprecated
+  public DelimitedParseSpec(
+      TimestampSpec timestampSpec,
+      DimensionsSpec dimensionsSpec,
+      String delimiter,
+      String listDelimiter,
+      List<String> columns
+  )
+  {
+    this(timestampSpec, dimensionsSpec, delimiter, listDelimiter, columns, false, 0);
+  }
+
+  @JsonProperty("delimiter")
+  public String getDelimiter()
+  {
+    return delimiter;
+  }
+
+  @JsonProperty("listDelimiter")
+  public String getListDelimiter()
+  {
+    return listDelimiter;
+  }
+
+  @JsonProperty("columns")
+  public List<String> getColumns()
+  {
+    return columns;
+  }
+
+  @JsonProperty
+  public boolean isHasHeaderRow()
+  {
+    return hasHeaderRow;
+  }
+
+  @JsonProperty("skipHeaderRows")
+  public int getSkipHeaderRows()
+  {
+    return skipHeaderRows;
+  }
+
+  @Override
+  public void verify(List<String> usedCols)
+  {
+    for (String columnName : usedCols) {
+      Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
+    }
+  }
+
+  @Override
+  public Parser<String, Object> makeParser()
+  {
+    return new DelimitedParser(
+        delimiter,
+        listDelimiter,
+        columns,
+        hasHeaderRow,
+        skipHeaderRows
+    );
+  }
+
+  @Override
+  public ParseSpec withTimestampSpec(TimestampSpec spec)
+  {
+    return new DelimitedParseSpec(
+        spec,
+        getDimensionsSpec(),
+        delimiter,
+        listDelimiter,
+        columns,
+        hasHeaderRow,
+        skipHeaderRows
+    );
+  }
+
+  @Override
+  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
+  {
+    return new DelimitedParseSpec(
+        getTimestampSpec(),
+        spec,
+        delimiter,
+        listDelimiter,
+        columns,
+        hasHeaderRow,
+        skipHeaderRows
+    );
+  }
+
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/DimensionSchema.java b/api/src/main/java/org/apache/druid/data/input/impl/DimensionSchema.java
new file mode 100644
index 00000000000..66461777ce3
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/DimensionSchema.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import com.fasterxml.jackson.annotation.JsonValue;
+import com.google.common.base.Strings;
+import org.apache.druid.guice.annotations.PublicApi;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.emitter.EmittingLogger;
+
+import java.util.Objects;
+
+/**
+ */
+@PublicApi
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringDimensionSchema.class)
+@JsonSubTypes(value = {
+    @JsonSubTypes.Type(name = DimensionSchema.STRING_TYPE_NAME, value = StringDimensionSchema.class),
+    @JsonSubTypes.Type(name = DimensionSchema.LONG_TYPE_NAME, value = LongDimensionSchema.class),
+    @JsonSubTypes.Type(name = DimensionSchema.FLOAT_TYPE_NAME, value = FloatDimensionSchema.class),
+    @JsonSubTypes.Type(name = DimensionSchema.DOUBLE_TYPE_NAME, value = DoubleDimensionSchema.class),
+    @JsonSubTypes.Type(name = DimensionSchema.SPATIAL_TYPE_NAME, value = NewSpatialDimensionSchema.class),
+})
+public abstract class DimensionSchema
+{
+  public static final String STRING_TYPE_NAME = "string";
+  public static final String LONG_TYPE_NAME = "long";
+  public static final String FLOAT_TYPE_NAME = "float";
+  public static final String SPATIAL_TYPE_NAME = "spatial";
+  public static final String DOUBLE_TYPE_NAME = "double";
+  private static final EmittingLogger log = new EmittingLogger(DimensionSchema.class);
+
+  /**
+   * Should be the same as {@code org.apache.druid.segment.column.ValueType}.
+   * TODO merge them when druid-api is merged back into the main repo
+   */
+  public enum ValueType
+  {
+    FLOAT,
+    LONG,
+    STRING,
+    DOUBLE,
+    @SuppressWarnings("unused") // used in org.apache.druid.segment.column.ValueType
+    COMPLEX;
+
+    @JsonValue
+    @Override
+    public String toString()
+    {
+      return StringUtils.toUpperCase(this.name());
+    }
+
+    @JsonCreator
+    public static ValueType fromString(String name)
+    {
+      return valueOf(StringUtils.toUpperCase(name));
+    }
+  }
+
+  public enum MultiValueHandling
+  {
+    SORTED_ARRAY,
+    SORTED_SET,
+    ARRAY {
+      @Override
+      public boolean needSorting()
+      {
+        return false;
+      }
+    };
+
+    public boolean needSorting()
+    {
+      return true;
+    }
+
+    @Override
+    @JsonValue
+    public String toString()
+    {
+      return StringUtils.toUpperCase(name());
+    }
+
+    @JsonCreator
+    public static MultiValueHandling fromString(String name)
+    {
+      return name == null ? ofDefault() : valueOf(StringUtils.toUpperCase(name));
+    }
+
+    // this can be system configuration
+    public static MultiValueHandling ofDefault()
+    {
+      return SORTED_ARRAY;
+    }
+  }
+
+  private final String name;
+  private final MultiValueHandling multiValueHandling;
+  private final boolean createBitmapIndex;
+
+  protected DimensionSchema(String name, MultiValueHandling multiValueHandling, boolean createBitmapIndex)
+  {
+    if (Strings.isNullOrEmpty(name)) {
+      log.warn("Null or Empty Dimension found");
+    }
+    this.name = name;
+    this.multiValueHandling = multiValueHandling == null ? MultiValueHandling.ofDefault() : multiValueHandling;
+    this.createBitmapIndex = createBitmapIndex;
+  }
+
+  @JsonProperty
+  public String getName()
+  {
+    return name;
+  }
+
+  @JsonProperty
+  public MultiValueHandling getMultiValueHandling()
+  {
+    return multiValueHandling;
+  }
+
+  @JsonProperty("createBitmapIndex")
+  public boolean hasBitmapIndex()
+  {
+    return createBitmapIndex;
+  }
+
+  @JsonIgnore
+  public abstract String getTypeName();
+
+  @JsonIgnore
+  public abstract ValueType getValueType();
+
+  @Override
+  public boolean equals(final Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    final DimensionSchema that = (DimensionSchema) o;
+    return createBitmapIndex == that.createBitmapIndex &&
+           Objects.equals(name, that.name) &&
+           Objects.equals(getTypeName(), that.getTypeName()) &&
+           Objects.equals(getValueType(), that.getValueType()) &&
+           multiValueHandling == that.multiValueHandling;
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return Objects.hash(name, multiValueHandling, createBitmapIndex, getTypeName(), getValueType());
+  }
+
+  @Override
+  public String toString()
+  {
+    return "DimensionSchema{" +
+           "name='" + name + '\'' +
+           ", valueType=" + getValueType() +
+           ", typeName=" + getTypeName() +
+           ", multiValueHandling=" + multiValueHandling +
+           ", createBitmapIndex=" + createBitmapIndex +
+           '}';
+  }
+}
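
A small sketch of the enum behavior defined above (class name hypothetical):

    import org.apache.druid.data.input.impl.DimensionSchema.MultiValueHandling;

    class MultiValueHandlingExample
    {
      public static void main(String[] args)
      {
        System.out.println(MultiValueHandling.fromString(null));          // SORTED_ARRAY, via ofDefault()
        System.out.println(MultiValueHandling.fromString("sorted_set"));  // SORTED_SET -- matching is case-insensitive
        System.out.println(MultiValueHandling.ARRAY.needSorting());       // false: ARRAY preserves input order
        System.out.println(MultiValueHandling.SORTED_SET.needSorting());  // true
      }
    }
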
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java
new file mode 100644
index 00000000000..3073976e0fd
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java
@@ -0,0 +1,256 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Function;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import org.apache.druid.guice.annotations.PublicApi;
+import org.apache.druid.java.util.common.parsers.ParserUtils;
+
+import javax.annotation.Nullable;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+@PublicApi
+public class DimensionsSpec
+{
+  private final List<DimensionSchema> dimensions;
+  private final Set<String> dimensionExclusions;
+  private final Map<String, DimensionSchema> dimensionSchemaMap;
+
+  public static final DimensionsSpec EMPTY = new DimensionsSpec(null, null, null);
+
+  public static List<DimensionSchema> getDefaultSchemas(List<String> dimNames)
+  {
+    return getDefaultSchemas(dimNames, DimensionSchema.MultiValueHandling.ofDefault());
+  }
+
+  public static List<DimensionSchema> getDefaultSchemas(
+      final List<String> dimNames,
+      final DimensionSchema.MultiValueHandling multiValueHandling
+  )
+  {
+    return dimNames.stream()
+                   .map(input -> new StringDimensionSchema(input, multiValueHandling, true))
+                   .collect(Collectors.toList());
+  }
+
+  public static DimensionSchema convertSpatialSchema(SpatialDimensionSchema spatialSchema)
+  {
+    return new NewSpatialDimensionSchema(spatialSchema.getDimName(), spatialSchema.getDims());
+  }
+
+  @JsonCreator
+  public DimensionsSpec(
+      @JsonProperty("dimensions") List<DimensionSchema> dimensions,
+      @JsonProperty("dimensionExclusions") List<String> dimensionExclusions,
+      @Deprecated @JsonProperty("spatialDimensions") List<SpatialDimensionSchema> spatialDimensions
+  )
+  {
+    this.dimensions = dimensions == null
+                      ? Lists.newArrayList()
+                      : Lists.newArrayList(dimensions);
+
+    this.dimensionExclusions = (dimensionExclusions == null)
+                               ? Sets.newHashSet()
+                               : Sets.newHashSet(dimensionExclusions);
+
+    List<SpatialDimensionSchema> spatialDims = (spatialDimensions == null)
+                                               ? Lists.newArrayList()
+                                               : spatialDimensions;
+
+    verify(spatialDims);
+
+    // Map for easy dimension name-based schema lookup
+    this.dimensionSchemaMap = new HashMap<>();
+    for (DimensionSchema schema : this.dimensions) {
+      dimensionSchemaMap.put(schema.getName(), schema);
+    }
+
+    for (SpatialDimensionSchema spatialSchema : spatialDims) {
+      DimensionSchema newSchema = DimensionsSpec.convertSpatialSchema(spatialSchema);
+      this.dimensions.add(newSchema);
+      dimensionSchemaMap.put(newSchema.getName(), newSchema);
+    }
+  }
+
+  public DimensionsSpec(List<DimensionSchema> dimensions)
+  {
+    this(dimensions, null, null);
+  }
+
+  @JsonProperty
+  public List<DimensionSchema> getDimensions()
+  {
+    return dimensions;
+  }
+
+  @JsonProperty
+  public Set<String> getDimensionExclusions()
+  {
+    return dimensionExclusions;
+  }
+
+  @Deprecated
+  @JsonIgnore
+  public List<SpatialDimensionSchema> getSpatialDimensions()
+  {
+    Iterable<NewSpatialDimensionSchema> filteredList = Iterables.filter(
+        dimensions, NewSpatialDimensionSchema.class
+    );
+
+    Iterable<SpatialDimensionSchema> transformedList = Iterables.transform(
+        filteredList,
+        new Function<NewSpatialDimensionSchema, SpatialDimensionSchema>()
+        {
+          @Nullable
+          @Override
+          public SpatialDimensionSchema apply(NewSpatialDimensionSchema input)
+          {
+            return new SpatialDimensionSchema(input.getName(), input.getDims());
+          }
+        }
+    );
+
+    return Lists.newArrayList(transformedList);
+  }
+
+
+  @JsonIgnore
+  public List<String> getDimensionNames()
+  {
+    return Lists.transform(
+        dimensions,
+        new Function<DimensionSchema, String>()
+        {
+          @Override
+          public String apply(DimensionSchema input)
+          {
+            return input.getName();
+          }
+        }
+    );
+  }
+
+  @PublicApi
+  public DimensionSchema getSchema(String dimension)
+  {
+    return dimensionSchemaMap.get(dimension);
+  }
+
+  public boolean hasCustomDimensions()
+  {
+    return !(dimensions == null || dimensions.isEmpty());
+  }
+
+  @PublicApi
+  public DimensionsSpec withDimensions(List<DimensionSchema> dims)
+  {
+    return new DimensionsSpec(dims, ImmutableList.copyOf(dimensionExclusions), null);
+  }
+
+  public DimensionsSpec withDimensionExclusions(Set<String> dimExs)
+  {
+    return new DimensionsSpec(
+        dimensions,
+        ImmutableList.copyOf(Sets.union(dimensionExclusions, dimExs)),
+        null
+    );
+  }
+
+  @Deprecated
+  public DimensionsSpec withSpatialDimensions(List<SpatialDimensionSchema> spatials)
+  {
+    return new DimensionsSpec(dimensions, ImmutableList.copyOf(dimensionExclusions), spatials);
+  }
+
+  private void verify(List<SpatialDimensionSchema> spatialDimensions)
+  {
+    List<String> dimNames = getDimensionNames();
+    Preconditions.checkArgument(
+        Sets.intersection(this.dimensionExclusions, Sets.newHashSet(dimNames)).isEmpty(),
+        "dimensions and dimensions exclusions cannot overlap"
+    );
+
+    ParserUtils.validateFields(dimNames);
+    ParserUtils.validateFields(dimensionExclusions);
+
+    List<String> spatialDimNames = Lists.transform(
+        spatialDimensions,
+        new Function<SpatialDimensionSchema, String>()
+        {
+          @Override
+          public String apply(SpatialDimensionSchema input)
+          {
+            return input.getDimName();
+          }
+        }
+    );
+
+    // Don't allow duplicates between main list and deprecated spatial list
+    ParserUtils.validateFields(Iterables.concat(dimNames, spatialDimNames));
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    DimensionsSpec that = (DimensionsSpec) o;
+
+    if (!dimensions.equals(that.dimensions)) {
+      return false;
+    }
+
+    return dimensionExclusions.equals(that.dimensionExclusions);
+  }
+
+  @Override
+  public int hashCode()
+  {
+    int result = dimensions.hashCode();
+    result = 31 * result + dimensionExclusions.hashCode();
+    return result;
+  }
+
+  @Override
+  public String toString()
+  {
+    return "DimensionsSpec{" +
+           "dimensions=" + dimensions +
+           ", dimensionExclusions=" + dimensionExclusions +
+           '}';
+  }
+}
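
Putting the pieces together, a sketch (names arbitrary). Overlapping dimension and exclusion names would fail verify() in the constructor:

    import com.google.common.collect.ImmutableSet;
    import org.apache.druid.data.input.impl.DimensionsSpec;

    import java.util.Arrays;

    class DimensionsSpecExample
    {
      public static void main(String[] args)
      {
        DimensionsSpec spec = new DimensionsSpec(
            DimensionsSpec.getDefaultSchemas(Arrays.asList("page", "country")),
            Arrays.asList("robot"),  // excluded at ingest time; must not overlap with dimensions
            null                     // deprecated spatialDimensions
        );
        System.out.println(spec.getDimensionNames());  // [page, country]

        DimensionsSpec wider = spec.withDimensionExclusions(ImmutableSet.of("city"));
        System.out.println(wider.getDimensionExclusions());  // exclusions merged via Sets.union
      }
    }
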
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/DoubleDimensionSchema.java b/api/src/main/java/org/apache/druid/data/input/impl/DoubleDimensionSchema.java
new file mode 100644
index 00000000000..e00e86ab100
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/DoubleDimensionSchema.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class DoubleDimensionSchema extends DimensionSchema
+{
+  @JsonCreator
+  public DoubleDimensionSchema(@JsonProperty("name") String name)
+  {
+    super(name, null, false);
+  }
+
+  @Override
+  public String getTypeName()
+  {
+    return DimensionSchema.DOUBLE_TYPE_NAME;
+  }
+
+  @Override
+  public ValueType getValueType()
+  {
+    return ValueType.DOUBLE;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/FileIteratingFirehose.java b/api/src/main/java/org/apache/druid/data/input/impl/FileIteratingFirehose.java
new file mode 100644
index 00000000000..a546f361f33
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/FileIteratingFirehose.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import org.apache.druid.data.input.Firehose;
+import org.apache.druid.data.input.InputRow;
+import org.apache.druid.utils.Runnables;
+import org.apache.commons.io.LineIterator;
+
+import javax.annotation.Nullable;
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+/**
+ */
+public class FileIteratingFirehose implements Firehose
+{
+  private final Iterator<LineIterator> lineIterators;
+  private final StringInputRowParser parser;
+
+  private LineIterator lineIterator = null;
+
+  private final Closeable closer;
+
+  public FileIteratingFirehose(
+      Iterator<LineIterator> lineIterators,
+      StringInputRowParser parser
+  )
+  {
+    this(lineIterators, parser, null);
+  }
+
+  public FileIteratingFirehose(
+      Iterator<LineIterator> lineIterators,
+      StringInputRowParser parser,
+      Closeable closer
+  )
+  {
+    this.lineIterators = lineIterators;
+    this.parser = parser;
+    this.closer = closer;
+  }
+
+  @Override
+  public boolean hasMore()
+  {
+    while ((lineIterator == null || !lineIterator.hasNext()) && lineIterators.hasNext()) {
+      lineIterator = getNextLineIterator();
+    }
+
+    return lineIterator != null && lineIterator.hasNext();
+  }
+
+  @Nullable
+  @Override
+  public InputRow nextRow()
+  {
+    if (!hasMore()) {
+      throw new NoSuchElementException();
+    }
+
+    return parser.parse(lineIterator.next());
+  }
+
+  private LineIterator getNextLineIterator()
+  {
+    if (lineIterator != null) {
+      lineIterator.close();
+    }
+
+    final LineIterator iterator = lineIterators.next();
+    parser.startFileFromBeginning();
+    return iterator;
+  }
+
+  @Override
+  public Runnable commit()
+  {
+    return Runnables.getNoopRunnable();
+  }
+
+  @Override
+  public void close() throws IOException
+  {
+    try {
+      if (lineIterator != null) {
+        lineIterator.close();
+      }
+    }
+    catch (Throwable t) {
+      try {
+        if (closer != null) {
+          closer.close();
+        }
+      }
+      catch (Exception e) {
+        t.addSuppressed(e);
+      }
+      throw t;
+    }
+    if (closer != null) {
+      closer.close();
+    }
+  }
+}
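
For orientation, a minimal usage sketch for the class above (illustrative
only, not part of the diff): the firehose is fed one commons-io LineIterator
per file, and since Firehose extends Closeable, try-with-resources releases
the underlying iterator. The parser construction is elided because it
depends on the data format.

    import org.apache.commons.io.FileUtils;
    import org.apache.commons.io.LineIterator;
    import org.apache.druid.data.input.InputRow;
    import org.apache.druid.data.input.impl.FileIteratingFirehose;
    import org.apache.druid.data.input.impl.StringInputRowParser;

    import java.io.File;
    import java.io.IOException;
    import java.util.Collections;

    public class FirehoseDrainDemo
    {
      // Drains a single file through the firehose.
      static void drain(File file, StringInputRowParser parser) throws IOException
      {
        LineIterator lines = FileUtils.lineIterator(file, "UTF-8");
        try (FileIteratingFirehose firehose =
                 new FileIteratingFirehose(Collections.singletonList(lines).iterator(), parser)) {
          while (firehose.hasMore()) {
            InputRow row = firehose.nextRow();
            System.out.println(row);
          }
        }
      }
    }
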
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/FloatDimensionSchema.java b/api/src/main/java/org/apache/druid/data/input/impl/FloatDimensionSchema.java
new file mode 100644
index 00000000000..86bb1cd24bc
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/FloatDimensionSchema.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class FloatDimensionSchema extends DimensionSchema
+{
+  @JsonCreator
+  public FloatDimensionSchema(
+      @JsonProperty("name") String name
+  )
+  {
+    super(name, null, false);
+  }
+
+  @Override
+  public String getTypeName()
+  {
+    return DimensionSchema.FLOAT_TYPE_NAME;
+  }
+
+  @Override
+  @JsonIgnore
+  public ValueType getValueType()
+  {
+    return ValueType.FLOAT;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/InputRowParser.java b/api/src/main/java/org/apache/druid/data/input/impl/InputRowParser.java
new file mode 100644
index 00000000000..52a0ac48c7a
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/InputRowParser.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import org.apache.druid.data.input.InputRow;
+import org.apache.druid.guice.annotations.ExtensionPoint;
+import org.apache.druid.java.util.common.collect.Utils;
+
+import javax.annotation.Nullable;
+import javax.validation.constraints.NotNull;
+import java.util.List;
+
+@ExtensionPoint
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringInputRowParser.class)
+@JsonSubTypes(value = {
+    @JsonSubTypes.Type(name = "string", value = StringInputRowParser.class),
+    @JsonSubTypes.Type(name = "map", value = MapInputRowParser.class),
+    @JsonSubTypes.Type(name = "noop", value = NoopInputRowParser.class)
+})
+public interface InputRowParser<T>
+{
+  /**
+   * Parses an input into a list of {@link InputRow}s. The list may contain nulls for rows that should be thrown
+   * away, and the method throws {@code ParseException} if the input is unparseable. This method must never return
+   * null, otherwise lots of things will break.
+   */
+  @NotNull
+  default List<InputRow> parseBatch(T input)
+  {
+    return Utils.nullableListOf(parse(input));
+  }
+
+  /**
+   * Parses an input into an {@link InputRow}. Returns null if the input should be thrown away, or throws
+   * {@code ParseException} if the input is unparseable.
+   */
+  @Deprecated
+  @Nullable
+  default InputRow parse(T input)
+  {
+    return null;
+  }
+
+  ParseSpec getParseSpec();
+
+  InputRowParser withParseSpec(ParseSpec parseSpec);
+}
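
The default parseBatch() above adapts legacy single-row parsers to the batch
API via Utils.nullableListOf. For extension authors, a sketch of a parser
that implements only the batch method (the class name is hypothetical, and
it assumes the timestamp column is always present):

    import com.google.common.collect.ImmutableList;
    import org.apache.druid.data.input.InputRow;
    import org.apache.druid.data.input.MapBasedInputRow;
    import org.apache.druid.data.input.impl.InputRowParser;
    import org.apache.druid.data.input.impl.ParseSpec;

    import java.util.List;
    import java.util.Map;

    public class MyMapParser implements InputRowParser<Map<String, Object>>
    {
      private final ParseSpec parseSpec;

      public MyMapParser(ParseSpec parseSpec)
      {
        this.parseSpec = parseSpec;
      }

      @Override
      public List<InputRow> parseBatch(Map<String, Object> input)
      {
        // Assumes the timestamp column is always present; production parsers
        // should handle a null timestamp like MapInputRowParser does.
        long timestamp = parseSpec.getTimestampSpec().extractTimestamp(input).getMillis();
        return ImmutableList.of(
            new MapBasedInputRow(timestamp, parseSpec.getDimensionsSpec().getDimensionNames(), input)
        );
      }

      @Override
      public ParseSpec getParseSpec()
      {
        return parseSpec;
      }

      @Override
      public InputRowParser withParseSpec(ParseSpec parseSpec)
      {
        return new MyMapParser(parseSpec);
      }
    }
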
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/JSONLowercaseParseSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/JSONLowercaseParseSpec.java
new file mode 100644
index 00000000000..c555f8feec8
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/JSONLowercaseParseSpec.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.druid.java.util.common.parsers.JSONToLowerParser;
+import org.apache.druid.java.util.common.parsers.Parser;
+
+import java.util.List;
+
+/**
+ * This class is only here for backwards compatibility
+ */
+@Deprecated
+public class JSONLowercaseParseSpec extends ParseSpec
+{
+  private final ObjectMapper objectMapper;
+
+  @JsonCreator
+  public JSONLowercaseParseSpec(
+      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
+      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec
+  )
+  {
+    super(timestampSpec, dimensionsSpec);
+    this.objectMapper = new ObjectMapper();
+  }
+
+  @Override
+  public void verify(List<String> usedCols)
+  {
+  }
+
+  @Override
+  public Parser<String, Object> makeParser()
+  {
+    return new JSONToLowerParser(objectMapper, null, null);
+  }
+
+  @Override
+  public ParseSpec withTimestampSpec(TimestampSpec spec)
+  {
+    return new JSONLowercaseParseSpec(spec, getDimensionsSpec());
+  }
+
+  @Override
+  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
+  {
+    return new JSONLowercaseParseSpec(getTimestampSpec(), spec);
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/JSONParseSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/JSONParseSpec.java
new file mode 100644
index 00000000000..d72c02e688e
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/JSONParseSpec.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonParser.Feature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.druid.java.util.common.parsers.JSONPathParser;
+import org.apache.druid.java.util.common.parsers.JSONPathSpec;
+import org.apache.druid.java.util.common.parsers.Parser;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ */
+public class JSONParseSpec extends ParseSpec
+{
+  private final ObjectMapper objectMapper;
+  private final JSONPathSpec flattenSpec;
+  private final Map<String, Boolean> featureSpec;
+
+  @JsonCreator
+  public JSONParseSpec(
+      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
+      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
+      @JsonProperty("flattenSpec") JSONPathSpec flattenSpec,
+      @JsonProperty("featureSpec") Map<String, Boolean> featureSpec
+  )
+  {
+    super(timestampSpec, dimensionsSpec);
+    this.objectMapper = new ObjectMapper();
+    this.flattenSpec = flattenSpec != null ? flattenSpec : JSONPathSpec.DEFAULT;
+    this.featureSpec = (featureSpec == null) ? new HashMap<String, Boolean>() : featureSpec;
+    for (Map.Entry<String, Boolean> entry : this.featureSpec.entrySet()) {
+      Feature feature = Feature.valueOf(entry.getKey());
+      objectMapper.configure(feature, entry.getValue());
+    }
+  }
+
+  @Deprecated
+  public JSONParseSpec(TimestampSpec ts, DimensionsSpec dims)
+  {
+    this(ts, dims, null, null);
+  }
+
+  @Override
+  public void verify(List<String> usedCols)
+  {
+  }
+
+  @Override
+  public Parser<String, Object> makeParser()
+  {
+    return new JSONPathParser(flattenSpec, objectMapper);
+  }
+
+  @Override
+  public ParseSpec withTimestampSpec(TimestampSpec spec)
+  {
+    return new JSONParseSpec(spec, getDimensionsSpec(), getFlattenSpec(), getFeatureSpec());
+  }
+
+  @Override
+  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
+  {
+    return new JSONParseSpec(getTimestampSpec(), spec, getFlattenSpec(), getFeatureSpec());
+  }
+
+  @JsonProperty
+  public JSONPathSpec getFlattenSpec()
+  {
+    return flattenSpec;
+  }
+
+  @JsonProperty
+  public Map<String, Boolean> getFeatureSpec()
+  {
+    return featureSpec;
+  }
+
+  @Override
+  public boolean equals(final Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    if (!super.equals(o)) {
+      return false;
+    }
+    final JSONParseSpec that = (JSONParseSpec) o;
+    return Objects.equals(flattenSpec, that.flattenSpec) &&
+           Objects.equals(featureSpec, that.featureSpec);
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return Objects.hash(super.hashCode(), flattenSpec, featureSpec);
+  }
+
+  @Override
+  public String toString()
+  {
+    return "JSONParseSpec{" +
+           "timestampSpec=" + getTimestampSpec() +
+           ", dimensionsSpec=" + getDimensionsSpec() +
+           ", flattenSpec=" + flattenSpec +
+           ", featureSpec=" + featureSpec +
+           '}';
+  }
+}
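
The featureSpec keys above must be valid com.fasterxml.jackson.core.JsonParser.Feature
names; the constructor applies each entry to the spec's private ObjectMapper,
and an unknown name makes Feature.valueOf() throw IllegalArgumentException.
A minimal sketch (not part of the diff):

    import com.google.common.collect.ImmutableMap;
    import org.apache.druid.data.input.impl.DimensionsSpec;
    import org.apache.druid.data.input.impl.JSONParseSpec;
    import org.apache.druid.data.input.impl.TimestampSpec;

    public class FeatureSpecDemo
    {
      public static void main(String[] args)
      {
        JSONParseSpec spec = new JSONParseSpec(
            new TimestampSpec("ts", "auto", null),
            new DimensionsSpec(null, null, null),    // no explicit dimensions
            null,                                    // flattenSpec: falls back to JSONPathSpec.DEFAULT
            ImmutableMap.of("ALLOW_COMMENTS", true)  // must be a valid JsonParser.Feature name
        );
        // The resulting parser now tolerates /* */ comments inside input rows.
        System.out.println(
            spec.makeParser().parseToMap("{\"ts\": \"2018-08-30T16:56:31Z\", /* ok */ \"x\": 1}")
        );
      }
    }
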
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/JavaScriptParseSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/JavaScriptParseSpec.java
new file mode 100644
index 00000000000..d12b978991d
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/JavaScriptParseSpec.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JacksonInject;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
+import org.apache.druid.java.util.common.parsers.JavaScriptParser;
+import org.apache.druid.java.util.common.parsers.Parser;
+import org.apache.druid.js.JavaScriptConfig;
+
+import java.util.List;
+
+/**
+ */
+public class JavaScriptParseSpec extends ParseSpec
+{
+  private final String function;
+  private final JavaScriptConfig config;
+
+  // This variable is lazily initialized to avoid unnecessary JavaScript compilation during JSON serde
+  private JavaScriptParser parser;
+
+  @JsonCreator
+  public JavaScriptParseSpec(
+      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
+      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
+      @JsonProperty("function") String function,
+      @JacksonInject JavaScriptConfig config
+  )
+  {
+    super(timestampSpec, dimensionsSpec);
+
+    this.function = function;
+    this.config = config;
+  }
+
+  @JsonProperty("function")
+  public String getFunction()
+  {
+    return function;
+  }
+
+  @Override
+  public void verify(List<String> usedCols)
+  {
+  }
+
+  @Override
+  public Parser<String, Object> makeParser()
+  {
+    // JavaScript configuration should be checked when it's actually used because someone might still want Druid
+    // nodes to be able to deserialize JavaScript-based objects even though JavaScript is disabled.
+    Preconditions.checkState(config.isEnabled(), "JavaScript is disabled");
+    parser = parser == null ? new JavaScriptParser(function) : parser;
+    return parser;
+  }
+
+  @Override
+  public ParseSpec withTimestampSpec(TimestampSpec spec)
+  {
+    return new JavaScriptParseSpec(spec, getDimensionsSpec(), function, config);
+  }
+
+  @Override
+  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
+  {
+    return new JavaScriptParseSpec(getTimestampSpec(), spec, function, config);
+  }
+
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/LongDimensionSchema.java b/api/src/main/java/org/apache/druid/data/input/impl/LongDimensionSchema.java
new file mode 100644
index 00000000000..a342013dc36
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/LongDimensionSchema.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class LongDimensionSchema extends DimensionSchema
+{
+  @JsonCreator
+  public LongDimensionSchema(
+      @JsonProperty("name") String name
+  )
+  {
+    super(name, null, false);
+  }
+
+  @Override
+  public String getTypeName()
+  {
+    return DimensionSchema.LONG_TYPE_NAME;
+  }
+
+  @Override
+  @JsonIgnore
+  public ValueType getValueType()
+  {
+    return ValueType.LONG;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/MapInputRowParser.java b/api/src/main/java/org/apache/druid/data/input/impl/MapInputRowParser.java
new file mode 100644
index 00000000000..133e574f567
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/MapInputRowParser.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import org.apache.druid.data.input.InputRow;
+import org.apache.druid.data.input.MapBasedInputRow;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.parsers.ParseException;
+import org.joda.time.DateTime;
+
+import java.util.List;
+import java.util.Map;
+
+public class MapInputRowParser implements InputRowParser<Map<String, Object>>
+{
+  private final ParseSpec parseSpec;
+
+  @JsonCreator
+  public MapInputRowParser(
+      @JsonProperty("parseSpec") ParseSpec parseSpec
+  )
+  {
+    this.parseSpec = parseSpec;
+  }
+
+  @Override
+  public List<InputRow> parseBatch(Map<String, Object> theMap)
+  {
+    final List<String> dimensions = parseSpec.getDimensionsSpec().hasCustomDimensions()
+                                    ? parseSpec.getDimensionsSpec().getDimensionNames()
+                                    : Lists.newArrayList(
+                                        Sets.difference(
+                                            theMap.keySet(),
+                                            parseSpec.getDimensionsSpec()
+                                                     .getDimensionExclusions()
+                                        )
+                                    );
+
+    final DateTime timestamp;
+    try {
+      timestamp = parseSpec.getTimestampSpec().extractTimestamp(theMap);
+      if (timestamp == null) {
+        final String input = theMap.toString();
+        throw new NullPointerException(
+            StringUtils.format(
+                "Null timestamp in input: %s",
+                input.length() < 100 ? input : input.substring(0, 100) + "..."
+            )
+        );
+      }
+    }
+    catch (Exception e) {
+      throw new ParseException(e, "Unparseable timestamp found! Event: %s", theMap);
+    }
+
+    return ImmutableList.of(new MapBasedInputRow(timestamp.getMillis(), dimensions, theMap));
+  }
+
+  @JsonProperty
+  @Override
+  public ParseSpec getParseSpec()
+  {
+    return parseSpec;
+  }
+
+  @Override
+  public InputRowParser withParseSpec(ParseSpec parseSpec)
+  {
+    return new MapInputRowParser(parseSpec);
+  }
+}
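
When no explicit dimensions are configured, parseBatch() above falls back to
schemaless discovery: every key of the input map except the exclusions
becomes a dimension. A sketch (not part of the diff; column names are
invented):

    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.ImmutableMap;
    import org.apache.druid.data.input.InputRow;
    import org.apache.druid.data.input.impl.DimensionsSpec;
    import org.apache.druid.data.input.impl.MapInputRowParser;
    import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
    import org.apache.druid.data.input.impl.TimestampSpec;

    public class MapParserDemo
    {
      public static void main(String[] args)
      {
        MapInputRowParser parser = new MapInputRowParser(
            new TimeAndDimsParseSpec(
                new TimestampSpec("t", "auto", null),
                // No explicit dimensions: discover all keys except the exclusions.
                new DimensionsSpec(null, ImmutableList.of("t"), null)
            )
        );
        InputRow row = parser.parseBatch(
            ImmutableMap.<String, Object>of("t", "2018-08-30T16:56:31Z", "page", "Main_Page", "lang", "en")
        ).get(0);
        System.out.println(row.getDimensions());  // [page, lang] here; follows the map's iteration order
      }
    }
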
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/NewSpatialDimensionSchema.java b/api/src/main/java/org/apache/druid/data/input/impl/NewSpatialDimensionSchema.java
new file mode 100644
index 00000000000..181d26fceb0
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/NewSpatialDimensionSchema.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.List;
+
+/**
+ * NOTE:
+ * This class should be deprecated after Druid supports configurable index types on dimensions.
+ * When that exists, this should be the implementation: https://github.com/apache/incubator-druid/issues/2622
+ *
+ * This is a stop-gap solution to consolidate the dimension specs and remove the separate spatial
+ * section in DimensionsSpec.
+ */
+public class NewSpatialDimensionSchema extends DimensionSchema
+{
+  private final List<String> dims;
+
+  @JsonCreator
+  public NewSpatialDimensionSchema(
+      @JsonProperty("name") String name,
+      @JsonProperty("dims") List<String> dims
+  )
+  {
+    super(name, null, true);
+    this.dims = dims;
+  }
+
+  @JsonProperty
+  public List<String> getDims()
+  {
+    return dims;
+  }
+
+  @Override
+  public String getTypeName()
+  {
+    return DimensionSchema.SPATIAL_TYPE_NAME;
+  }
+
+  @Override
+  @JsonIgnore
+  public ValueType getValueType()
+  {
+    return ValueType.STRING;
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    NewSpatialDimensionSchema that = (NewSpatialDimensionSchema) o;
+
+    return dims != null ? dims.equals(that.dims) : that.dims == null;
+
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return dims != null ? dims.hashCode() : 0;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/NoopInputRowParser.java b/api/src/main/java/org/apache/druid/data/input/impl/NoopInputRowParser.java
new file mode 100644
index 00000000000..c387df8ee8b
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/NoopInputRowParser.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.data.input.InputRow;
+
+import java.util.List;
+
+/**
+ */
+public class NoopInputRowParser implements InputRowParser<InputRow>
+{
+  private final ParseSpec parseSpec;
+
+  @JsonCreator
+  public NoopInputRowParser(
+      @JsonProperty("parseSpec") ParseSpec parseSpec
+  )
+  {
+    this.parseSpec = parseSpec != null ? parseSpec : new TimeAndDimsParseSpec(null, null);
+  }
+
+  @Override
+  public List<InputRow> parseBatch(InputRow input)
+  {
+    return ImmutableList.of(input);
+  }
+
+  @JsonProperty
+  @Override
+  public ParseSpec getParseSpec()
+  {
+    return parseSpec;
+  }
+
+  @Override
+  public InputRowParser withParseSpec(ParseSpec parseSpec)
+  {
+    return new NoopInputRowParser(parseSpec);
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    NoopInputRowParser that = (NoopInputRowParser) o;
+
+    return parseSpec.equals(that.parseSpec);
+
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return parseSpec.hashCode();
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/ParseSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/ParseSpec.java
new file mode 100644
index 00000000000..f98e9b14873
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/ParseSpec.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import com.google.common.base.Preconditions;
+import org.apache.druid.guice.annotations.ExtensionPoint;
+import org.apache.druid.guice.annotations.PublicApi;
+import org.apache.druid.java.util.common.parsers.Parser;
+
+import java.util.List;
+
+@ExtensionPoint
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "format", defaultImpl = DelimitedParseSpec.class)
+@JsonSubTypes(value = {
+    @JsonSubTypes.Type(name = "json", value = JSONParseSpec.class),
+    @JsonSubTypes.Type(name = "csv", value = CSVParseSpec.class),
+    @JsonSubTypes.Type(name = "tsv", value = DelimitedParseSpec.class),
+    @JsonSubTypes.Type(name = "jsonLowercase", value = JSONLowercaseParseSpec.class),
+    @JsonSubTypes.Type(name = "timeAndDims", value = TimeAndDimsParseSpec.class),
+    @JsonSubTypes.Type(name = "regex", value = RegexParseSpec.class),
+    @JsonSubTypes.Type(name = "javascript", value = JavaScriptParseSpec.class)
+
+})
+public abstract class ParseSpec
+{
+  private final TimestampSpec timestampSpec;
+  private final DimensionsSpec dimensionsSpec;
+
+  protected ParseSpec(TimestampSpec timestampSpec, DimensionsSpec dimensionsSpec)
+  {
+    this.timestampSpec = Preconditions.checkNotNull(timestampSpec, "parseSpec requires timestampSpec");
+    this.dimensionsSpec = Preconditions.checkNotNull(dimensionsSpec, "parseSpec requires dimensionsSpec");
+  }
+
+  @JsonProperty
+  public TimestampSpec getTimestampSpec()
+  {
+    return timestampSpec;
+  }
+
+  @JsonProperty
+  public DimensionsSpec getDimensionsSpec()
+  {
+    return dimensionsSpec;
+  }
+
+  @PublicApi
+  public void verify(List<String> usedCols)
+  {
+    // do nothing
+  }
+
+  public Parser<String, Object> makeParser()
+  {
+    return null;
+  }
+
+  @PublicApi
+  public ParseSpec withTimestampSpec(TimestampSpec spec)
+  {
+    throw new UnsupportedOperationException();
+  }
+
+  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
+  {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    ParseSpec parseSpec = (ParseSpec) o;
+
+    if (timestampSpec != null ? !timestampSpec.equals(parseSpec.timestampSpec) : parseSpec.timestampSpec != null) {
+      return false;
+    }
+    return !(dimensionsSpec != null
+             ? !dimensionsSpec.equals(parseSpec.dimensionsSpec)
+             : parseSpec.dimensionsSpec != null);
+
+  }
+
+  @Override
+  public int hashCode()
+  {
+    int result = timestampSpec != null ? timestampSpec.hashCode() : 0;
+    result = 31 * result + (dimensionsSpec != null ? dimensionsSpec.hashCode() : 0);
+    return result;
+  }
+}
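
The JsonTypeInfo/JsonSubTypes annotations above make ParseSpec polymorphic
on the "format" property. A sketch of reading a spec from JSON (not part of
the diff; it uses a plain ObjectMapper for brevity rather than Druid's
injected mapper, and assumes the usual string shorthand for dimensions
resolves to StringDimensionSchema, as it does in Druid ingestion specs):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.druid.data.input.impl.ParseSpec;

    public class ParseSpecSerdeDemo
    {
      public static void main(String[] args) throws Exception
      {
        ObjectMapper mapper = new ObjectMapper();
        String json = "{\"format\": \"json\","
                    + " \"timestampSpec\": {\"column\": \"ts\", \"format\": \"auto\"},"
                    + " \"dimensionsSpec\": {\"dimensions\": [\"page\"]}}";
        // The "format" property selects the subtype, so this yields a JSONParseSpec.
        ParseSpec spec = mapper.readValue(json, ParseSpec.class);
        System.out.println(spec.getClass().getSimpleName());
      }
    }
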
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/RegexParseSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/RegexParseSpec.java
new file mode 100644
index 00000000000..5f8c1ea3ef1
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/RegexParseSpec.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import org.apache.druid.java.util.common.parsers.Parser;
+import org.apache.druid.java.util.common.parsers.RegexParser;
+
+import java.util.List;
+
+/**
+ */
+public class RegexParseSpec extends ParseSpec
+{
+  private final String listDelimiter;
+  private final List<String> columns;
+  private final String pattern;
+
+  @JsonCreator
+  public RegexParseSpec(
+      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
+      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
+      @JsonProperty("listDelimiter") String listDelimiter,
+      @JsonProperty("columns") List<String> columns,
+      @JsonProperty("pattern") String pattern
+  )
+  {
+    super(timestampSpec, dimensionsSpec);
+
+    this.listDelimiter = listDelimiter;
+    this.columns = columns;
+    this.pattern = pattern;
+
+    verify(dimensionsSpec.getDimensionNames());
+  }
+
+  @JsonProperty
+  public String getListDelimiter()
+  {
+    return listDelimiter;
+  }
+
+  @JsonProperty("pattern")
+  public String getPattern()
+  {
+    return pattern;
+  }
+
+  @JsonProperty
+  public List<String> getColumns()
+  {
+    return columns;
+  }
+
+  @Override
+  public void verify(List<String> usedCols)
+  {
+    if (columns != null) {
+      for (String columnName : usedCols) {
+        Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
+      }
+    }
+  }
+
+  @Override
+  public Parser<String, Object> makeParser()
+  {
+    if (columns == null) {
+      return new RegexParser(pattern, Optional.fromNullable(listDelimiter));
+    }
+    return new RegexParser(pattern, Optional.fromNullable(listDelimiter), columns);
+  }
+
+  @Override
+  public ParseSpec withTimestampSpec(TimestampSpec spec)
+  {
+    return new RegexParseSpec(spec, getDimensionsSpec(), listDelimiter, columns, pattern);
+  }
+
+  @Override
+  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
+  {
+    return new RegexParseSpec(getTimestampSpec(), spec, listDelimiter, columns, pattern);
+  }
+
+}
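
A sketch of the regex spec in action (not part of the diff): each capture
group in the pattern is bound to the column at the same position.

    import com.google.common.collect.ImmutableList;
    import org.apache.druid.data.input.impl.DimensionsSpec;
    import org.apache.druid.data.input.impl.RegexParseSpec;
    import org.apache.druid.data.input.impl.TimestampSpec;

    public class RegexSpecDemo
    {
      public static void main(String[] args)
      {
        RegexParseSpec spec = new RegexParseSpec(
            new TimestampSpec("ts", "auto", null),
            new DimensionsSpec(null, null, null),
            null,                                    // listDelimiter: use the parser default
            ImmutableList.of("ts", "level", "msg"),  // one column per capture group
            "^(\\S+) (\\S+) (.+)$"
        );
        // Prints {ts=..., level=INFO, msg=renamed io.druid to org.apache.druid}
        System.out.println(
            spec.makeParser().parseToMap("2018-08-30T16:56:31Z INFO renamed io.druid to org.apache.druid")
        );
      }
    }
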
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/SpatialDimensionSchema.java b/api/src/main/java/org/apache/druid/data/input/impl/SpatialDimensionSchema.java
new file mode 100644
index 00000000000..734e54bbec5
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/SpatialDimensionSchema.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.List;
+
+/**
+ */
+@Deprecated
+public class SpatialDimensionSchema
+{
+  private final String dimName;
+  private final List<String> dims;
+
+  @JsonCreator
+  public SpatialDimensionSchema(
+      @JsonProperty("dimName") String dimName,
+      @JsonProperty("dims") List<String> dims
+  )
+  {
+    this.dimName = dimName;
+    this.dims = dims;
+  }
+
+  @JsonProperty
+  public String getDimName()
+  {
+    return dimName;
+  }
+
+  @JsonProperty
+  public List<String> getDims()
+  {
+    return dims;
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    SpatialDimensionSchema that = (SpatialDimensionSchema) o;
+
+    if (dimName != null ? !dimName.equals(that.dimName) : that.dimName != null) {
+      return false;
+    }
+    return dims != null ? dims.equals(that.dims) : that.dims == null;
+
+  }
+
+  @Override
+  public int hashCode()
+  {
+    int result = dimName != null ? dimName.hashCode() : 0;
+    result = 31 * result + (dims != null ? dims.hashCode() : 0);
+    return result;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/SqlFirehose.java b/api/src/main/java/org/apache/druid/data/input/impl/SqlFirehose.java
new file mode 100644
index 00000000000..bda4a84cc77
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/SqlFirehose.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.druid.data.input.impl;
+
+import com.google.common.collect.Iterators;
+import org.apache.druid.data.input.Firehose;
+import org.apache.druid.data.input.InputRow;
+import org.apache.druid.data.input.impl.prefetch.JsonIterator;
+import org.apache.druid.java.util.common.io.Closer;
+import org.apache.druid.utils.Runnables;
+
+import javax.annotation.Nullable;
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+
+public class SqlFirehose implements Firehose
+{
+  private final Iterator<JsonIterator<Map<String, Object>>> resultIterator;
+  private final InputRowParser<Map<String, Object>> parser;
+  private final Closeable closer;
+  private JsonIterator<Map<String, Object>> lineIterator = null;
+
+  public SqlFirehose(
+      Iterator<JsonIterator<Map<String, Object>>> lineIterators,
+      InputRowParser<Map<String, Object>> parser,
+      Closeable closer
+  )
+  {
+    this.resultIterator = lineIterators;
+    this.parser = parser;
+    this.closer = closer;
+  }
+
+  @Override
+  public boolean hasMore()
+  {
+    while ((lineIterator == null || !lineIterator.hasNext()) && resultIterator.hasNext()) {
+      lineIterator = getNextLineIterator();
+    }
+
+    return lineIterator != null && lineIterator.hasNext();
+  }
+
+  @Nullable
+  @Override
+  public InputRow nextRow()
+  {
+    Map<String, Object> mapToParse = lineIterator.next();
+    return Iterators.getOnlyElement(parser.parseBatch(mapToParse).iterator());
+  }
+
+  private JsonIterator<Map<String, Object>> getNextLineIterator()
+  {
+    // Drop the reference to the exhausted iterator before advancing to the next one.
+    lineIterator = null;
+    return resultIterator.next();
+  }
+
+  @Override
+  public Runnable commit()
+  {
+    return Runnables.getNoopRunnable();
+  }
+
+  @Override
+  public void close() throws IOException
+  {
+    Closer firehoseCloser = Closer.create();
+    if (lineIterator != null) {
+      firehoseCloser.register(lineIterator);
+    }
+    firehoseCloser.register(closer);
+    firehoseCloser.close();
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/StringDimensionSchema.java b/api/src/main/java/org/apache/druid/data/input/impl/StringDimensionSchema.java
new file mode 100644
index 00000000000..7c030eb703a
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/StringDimensionSchema.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class StringDimensionSchema extends DimensionSchema
+{
+  private static final boolean DEFAULT_CREATE_BITMAP_INDEX = true;
+
+  @JsonCreator
+  public static StringDimensionSchema create(String name)
+  {
+    return new StringDimensionSchema(name);
+  }
+
+  @JsonCreator
+  public StringDimensionSchema(
+      @JsonProperty("name") String name,
+      @JsonProperty("multiValueHandling") MultiValueHandling multiValueHandling,
+      @JsonProperty("createBitmapIndex") Boolean createBitmapIndex
+  )
+  {
+    super(name, multiValueHandling, createBitmapIndex == null ? DEFAULT_CREATE_BITMAP_INDEX : createBitmapIndex);
+  }
+
+  public StringDimensionSchema(String name)
+  {
+    this(name, null, DEFAULT_CREATE_BITMAP_INDEX);
+  }
+
+  @Override
+  public String getTypeName()
+  {
+    return DimensionSchema.STRING_TYPE_NAME;
+  }
+
+  @Override
+  @JsonIgnore
+  public ValueType getValueType()
+  {
+    return ValueType.STRING;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/StringInputRowParser.java b/api/src/main/java/org/apache/druid/data/input/impl/StringInputRowParser.java
new file mode 100644
index 00000000000..07761c0898d
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/StringInputRowParser.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Iterators;
+import org.apache.druid.data.input.ByteBufferInputRowParser;
+import org.apache.druid.data.input.InputRow;
+import org.apache.druid.java.util.common.collect.Utils;
+import org.apache.druid.java.util.common.parsers.ParseException;
+import org.apache.druid.java.util.common.parsers.Parser;
+
+import javax.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.nio.charset.Charset;
+import java.nio.charset.CoderResult;
+import java.nio.charset.CodingErrorAction;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.Map;
+
+/**
+ */
+public class StringInputRowParser implements ByteBufferInputRowParser
+{
+  private static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
+
+  private final ParseSpec parseSpec;
+  private final MapInputRowParser mapParser;
+  private final Charset charset;
+
+  private Parser<String, Object> parser;
+  private CharBuffer chars;
+
+  @JsonCreator
+  public StringInputRowParser(
+      @JsonProperty("parseSpec") ParseSpec parseSpec,
+      @JsonProperty("encoding") String encoding
+  )
+  {
+    this.parseSpec = Preconditions.checkNotNull(parseSpec, "parseSpec");
+    this.mapParser = new MapInputRowParser(parseSpec);
+
+    if (encoding != null) {
+      this.charset = Charset.forName(encoding);
+    } else {
+      this.charset = DEFAULT_CHARSET;
+    }
+  }
+
+  @Deprecated
+  public StringInputRowParser(ParseSpec parseSpec)
+  {
+    this(parseSpec, null);
+  }
+
+  @Override
+  public List<InputRow> parseBatch(ByteBuffer input)
+  {
+    return Utils.nullableListOf(parseMap(buildStringKeyMap(input)));
+  }
+
+  @JsonProperty
+  @Override
+  public ParseSpec getParseSpec()
+  {
+    return parseSpec;
+  }
+
+  @JsonProperty
+  public String getEncoding()
+  {
+    return charset.name();
+  }
+
+  @Override
+  public StringInputRowParser withParseSpec(ParseSpec parseSpec)
+  {
+    return new StringInputRowParser(parseSpec, getEncoding());
+  }
+
+  private Map<String, Object> buildStringKeyMap(ByteBuffer input)
+  {
+    int payloadSize = input.remaining();
+
+    if (chars == null || chars.remaining() < payloadSize) {
+      chars = CharBuffer.allocate(payloadSize);
+    }
+
+    final CoderResult coderResult = charset.newDecoder()
+                                           .onMalformedInput(CodingErrorAction.REPLACE)
+                                           .onUnmappableCharacter(CodingErrorAction.REPLACE)
+                                           .decode(input, chars, true);
+
+    Map<String, Object> theMap;
+    if (coderResult.isUnderflow()) {
+      chars.flip();
+      try {
+        theMap = parseString(chars.toString());
+      }
+      finally {
+        chars.clear();
+      }
+    } else {
+      throw new ParseException("Failed with CoderResult[%s]", coderResult);
+    }
+    return theMap;
+  }
+
+  public void initializeParser()
+  {
+    if (parser == null) {
+      // The parser should be created only when it is actually used, to avoid unnecessary initialization
+      // of the underlying parseSpec.
+      parser = parseSpec.makeParser();
+    }
+  }
+
+  public void startFileFromBeginning()
+  {
+    initializeParser();
+    parser.startFileFromBeginning();
+  }
+
+  @Nullable
+  public InputRow parse(@Nullable String input)
+  {
+    return parseMap(parseString(input));
+  }
+
+  @Nullable
+  private Map<String, Object> parseString(@Nullable String inputString)
+  {
+    initializeParser();
+    return parser.parseToMap(inputString);
+  }
+
+  @Nullable
+  private InputRow parseMap(@Nullable Map<String, Object> theMap)
+  {
+    // If a header is present in the data (and the parser is configured to skip it), parseString returns null for it
+    if (theMap == null) {
+      return null;
+    }
+    return Iterators.getOnlyElement(mapParser.parseBatch(theMap).iterator());
+  }
+}
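
A usage sketch for the parser above (not part of the diff): a null encoding
falls back to UTF-8, and each line is decoded, parsed to a map, then handed
to the wrapped MapInputRowParser.

    import org.apache.druid.data.input.InputRow;
    import org.apache.druid.data.input.impl.DimensionsSpec;
    import org.apache.druid.data.input.impl.JSONParseSpec;
    import org.apache.druid.data.input.impl.StringInputRowParser;
    import org.apache.druid.data.input.impl.TimestampSpec;

    public class StringParserDemo
    {
      public static void main(String[] args)
      {
        StringInputRowParser parser = new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("ts", "auto", null),
                new DimensionsSpec(null, null, null),
                null,
                null
            ),
            null  // encoding: null falls back to UTF-8
        );
        InputRow row = parser.parse("{\"ts\": \"2018-08-30T16:56:31Z\", \"page\": \"Main_Page\"}");
        System.out.println(row);
      }
    }
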
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/TimeAndDimsParseSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/TimeAndDimsParseSpec.java
new file mode 100644
index 00000000000..969ea9a908e
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/TimeAndDimsParseSpec.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.druid.java.util.common.parsers.Parser;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ */
+public class TimeAndDimsParseSpec extends ParseSpec
+{
+  @JsonCreator
+  public TimeAndDimsParseSpec(
+      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
+      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec
+  )
+  {
+    super(
+        timestampSpec != null ? timestampSpec : new TimestampSpec(null, null, null),
+        dimensionsSpec != null ? dimensionsSpec : new DimensionsSpec(null, null, null)
+    );
+  }
+
+  @Override
+  public Parser<String, Object> makeParser()
+  {
+    return new Parser<String, Object>()
+    {
+      @Override
+      public Map<String, Object> parseToMap(String input)
+      {
+        throw new UnsupportedOperationException("not supported");
+      }
+
+      @Override
+      public void setFieldNames(Iterable<String> fieldNames)
+      {
+        throw new UnsupportedOperationException("not supported");
+      }
+
+      @Override
+      public List<String> getFieldNames()
+      {
+        throw new UnsupportedOperationException("not supported");
+      }
+    };
+  }
+
+  @Override
+  public ParseSpec withTimestampSpec(TimestampSpec spec)
+  {
+    return new TimeAndDimsParseSpec(spec, getDimensionsSpec());
+  }
+
+  @Override
+  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
+  {
+    return new TimeAndDimsParseSpec(getTimestampSpec(), spec);
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java b/api/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java
new file mode 100644
index 00000000000..a61f98370f2
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Function;
+import org.apache.druid.guice.annotations.PublicApi;
+import org.apache.druid.java.util.common.parsers.TimestampParser;
+import org.joda.time.DateTime;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ */
+@PublicApi
+public class TimestampSpec
+{
+  private static class ParseCtx
+  {
+    Object lastTimeObject = null;
+    DateTime lastDateTime = null;
+  }
+
+  private static final String DEFAULT_COLUMN = "timestamp";
+  private static final String DEFAULT_FORMAT = "auto";
+  private static final DateTime DEFAULT_MISSING_VALUE = null;
+
+  private final String timestampColumn;
+  private final String timestampFormat;
+  // this value should never be set for production data
+  private final DateTime missingValue;
+  /** This field is a derivative of {@link #timestampFormat}; not checked in {@link #equals} and {@link #hashCode} */
+  private final Function<Object, DateTime> timestampConverter;
+
+  // remember last value parsed
+  private static final ThreadLocal<ParseCtx> parseCtx = ThreadLocal.withInitial(ParseCtx::new);
+
+  @JsonCreator
+  public TimestampSpec(
+      @JsonProperty("column") String timestampColumn,
+      @JsonProperty("format") String format,
+      // this value should never be set for production data
+      @JsonProperty("missingValue") DateTime missingValue
+  )
+  {
+    this.timestampColumn = (timestampColumn == null) ? DEFAULT_COLUMN : timestampColumn;
+    this.timestampFormat = format == null ? DEFAULT_FORMAT : format;
+    this.timestampConverter = TimestampParser.createObjectTimestampParser(timestampFormat);
+    this.missingValue = missingValue == null
+                        ? DEFAULT_MISSING_VALUE
+                        : missingValue;
+  }
+
+  @JsonProperty("column")
+  public String getTimestampColumn()
+  {
+    return timestampColumn;
+  }
+
+  @JsonProperty("format")
+  public String getTimestampFormat()
+  {
+    return timestampFormat;
+  }
+
+  @JsonProperty("missingValue")
+  public DateTime getMissingValue()
+  {
+    return missingValue;
+  }
+
+  public DateTime extractTimestamp(Map<String, Object> input)
+  {
+    return parseDateTime(input.get(timestampColumn));
+  }
+
+  public DateTime parseDateTime(Object input)
+  {
+    DateTime extracted = missingValue;
+    if (input != null) {
+      ParseCtx ctx = parseCtx.get();
+      // Check if the input is equal to the last input, so we don't need to parse it again
+      if (input.equals(ctx.lastTimeObject)) {
+        extracted = ctx.lastDateTime;
+      } else {
+        extracted = timestampConverter.apply(input);
+        ParseCtx newCtx = new ParseCtx();
+        newCtx.lastTimeObject = input;
+        newCtx.lastDateTime = extracted;
+        parseCtx.set(newCtx);
+      }
+    }
+    return extracted;
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    TimestampSpec that = (TimestampSpec) o;
+
+    if (!timestampColumn.equals(that.timestampColumn)) {
+      return false;
+    }
+    if (!timestampFormat.equals(that.timestampFormat)) {
+      return false;
+    }
+    return !(missingValue != null ? !missingValue.equals(that.missingValue) : that.missingValue != null);
+
+  }
+
+  @Override
+  public int hashCode()
+  {
+    int result = timestampColumn.hashCode();
+    result = 31 * result + timestampFormat.hashCode();
+    result = 31 * result + (missingValue != null ? missingValue.hashCode() : 0);
+    return result;
+  }
+
+  @Override
+  public String toString()
+  {
+    return "TimestampSpec{" +
+           "timestampColumn='" + timestampColumn + '\'' +
+           ", timestampFormat='" + timestampFormat + '\'' +
+           ", missingValue=" + missingValue +
+           '}';
+  }
+
+  // Simple merge strategy on TimestampSpec: returns the common spec if all are equal, or else
+  // null. This can be improved in the future, but is good enough for most use cases.
+  public static TimestampSpec mergeTimestampSpec(List<TimestampSpec> toMerge)
+  {
+    if (toMerge == null || toMerge.size() == 0) {
+      return null;
+    }
+
+    TimestampSpec result = toMerge.get(0);
+    for (int i = 1; i < toMerge.size(); i++) {
+      if (toMerge.get(i) == null) {
+        continue;
+      }
+      if (!Objects.equals(result, toMerge.get(i))) {
+        return null;
+      }
+    }
+
+    return result;
+  }
+}
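
For illustration, a minimal usage sketch of the spec above (hedged: the "ts" column and the
sample row are hypothetical; the "auto" format delegates detection to TimestampParser):

    package org.apache.druid.data.input.impl;

    import com.google.common.collect.ImmutableMap;
    import org.joda.time.DateTime;

    public class TimestampSpecExample
    {
      public static void main(String[] args)
      {
        // "auto" lets TimestampParser choose between ISO strings and epoch numbers.
        TimestampSpec spec = new TimestampSpec("ts", "auto", null);

        // Extract the timestamp from a row keyed by the configured column.
        DateTime t = spec.extractTimestamp(
            ImmutableMap.<String, Object>of("ts", "2018-08-30T16:56:31Z")
        );
        System.out.println(t); // e.g. 2018-08-30T16:56:31.000Z
      }
    }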
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/CacheManager.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/CacheManager.java
new file mode 100644
index 00000000000..28675b8a10a
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/CacheManager.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.logger.Logger;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A class managing cached files used by {@link PrefetchableTextFilesFirehoseFactory}.
+ */
+class CacheManager<T>
+{
+  private static final Logger LOG = new Logger(CacheManager.class);
+
+  // A rough upper bound on the total size of cached objects; the actual cached size can be bigger. The reason is our
+  // current client implementations for cloud storages like s3 don't support range scan yet, so we must download the
+  // whole file at once. It would still be possible to keep the cached size under this limit by estimating the
+  // after-fetch size, but that would force us to handle the case where some files can never be fetched due to their
+  // large size, which would complicate the implementation.
+  private final long maxCacheCapacityBytes;
+
+  private final List<FetchedFile<T>> files = new ArrayList<>();
+
+  private long totalCachedBytes;
+
+  CacheManager(long maxCacheCapacityBytes)
+  {
+    this.maxCacheCapacityBytes = maxCacheCapacityBytes;
+  }
+
+  boolean isEnabled()
+  {
+    return maxCacheCapacityBytes > 0;
+  }
+
+  boolean cacheable()
+  {
+    // maxCacheCapacityBytes is a rough limit, so if totalCachedBytes is larger than it, no more caching is
+    // allowed.
+    return totalCachedBytes < maxCacheCapacityBytes;
+  }
+
+  FetchedFile<T> cache(FetchedFile<T> fetchedFile)
+  {
+    if (!cacheable()) {
+      throw new ISE(
+          "Cache space is full. totalCachedBytes[%d], maxCacheCapacityBytes[%d]",
+          totalCachedBytes,
+          maxCacheCapacityBytes
+      );
+    }
+
+    final FetchedFile<T> cachedFile = fetchedFile.cache();
+    files.add(cachedFile);
+    totalCachedBytes += cachedFile.length();
+
+    LOG.info("Object[%s] is cached. Current cached bytes is [%d]", cachedFile.getObject(), totalCachedBytes);
+    return cachedFile;
+  }
+
+  List<FetchedFile<T>> getFiles()
+  {
+    return files;
+  }
+
+  @VisibleForTesting
+  long getTotalCachedBytes()
+  {
+    return totalCachedBytes;
+  }
+
+  long getMaxCacheCapacityBytes()
+  {
+    return maxCacheCapacityBytes;
+  }
+}
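
Since maxCacheCapacityBytes is only a rough limit, the file that crosses the threshold is
still admitted. A hedged sketch of that behavior, assuming same-package access to the
package-private classes (the 100-byte limit and object keys are hypothetical):

    package org.apache.druid.data.input.impl.prefetch;

    import java.io.File;
    import java.nio.file.Files;

    public class CacheManagerExample
    {
      public static void main(String[] args) throws Exception
      {
        CacheManager<String> manager = new CacheManager<>(100); // rough 100-byte limit

        File f1 = File.createTempFile("cache-example", null);
        Files.write(f1.toPath(), new byte[90]);
        File f2 = File.createTempFile("cache-example", null);
        Files.write(f2.toPath(), new byte[90]);

        // First file: cacheable() is true (0 < 100); total becomes 90.
        manager.cache(new FetchedFile<>("object-1", f1, f1::delete));

        // Second file still fits (90 < 100); total becomes 180, past the rough limit.
        manager.cache(new FetchedFile<>("object-2", f2, f2::delete));

        // A third cache() call would now throw ISE, since 180 >= 100.
        System.out.println(manager.cacheable()); // false
      }
    }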
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/FetchedFile.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/FetchedFile.java
new file mode 100644
index 00000000000..9874820e072
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/FetchedFile.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import java.io.Closeable;
+import java.io.File;
+
+/**
+ * A class containing meta information about fetched objects.  This class is used by {@link Fetcher}.
+ */
+class FetchedFile<T>
+{
+  // Original object
+  private final T object;
+  // Fetched file stored in local disk
+  private final File file;
+  // Closer which is called when the file is not needed anymore. Usually this deletes the file except for cached files.
+  private final Closeable resourceCloser;
+
+  FetchedFile(T object, File file, Closeable resourceCloser)
+  {
+    this.object = object;
+    this.file = file;
+    this.resourceCloser = resourceCloser;
+  }
+
+  long length()
+  {
+    return file.length();
+  }
+
+  T getObject()
+  {
+    return object;
+  }
+
+  File getFile()
+  {
+    return file;
+  }
+
+  Closeable getResourceCloser()
+  {
+    return resourceCloser;
+  }
+
+  FetchedFile<T> cache()
+  {
+    return new FetchedFile<>(object, file, () -> {});
+  }
+}
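
A hedged sketch of the closer swap above, assuming same-package access to the package-private
class: releasing the original fetched file deletes it, while its cached twin keeps the file on disk.

    package org.apache.druid.data.input.impl.prefetch;

    import java.io.File;

    public class FetchedFileExample
    {
      public static void main(String[] args) throws Exception
      {
        File file = File.createTempFile("fetched", null);
        FetchedFile<String> fetched = new FetchedFile<>("object-key", file, file::delete);

        FetchedFile<String> cached = fetched.cache();
        cached.getResourceCloser().close();  // no-op: cached files survive
        System.out.println(file.exists());   // true

        fetched.getResourceCloser().close(); // the original closer deletes the file
        System.out.println(file.exists());   // false
      }
    }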
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/Fetcher.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/Fetcher.java
new file mode 100644
index 00000000000..85276ea96d7
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/Fetcher.java
@@ -0,0 +1,281 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Throwables;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.logger.Logger;
+
+import javax.annotation.Nullable;
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * A file fetcher used by {@link PrefetchableTextFilesFirehoseFactory} and {@link PrefetchSqlFirehoseFactory}.
+ * See the javadoc of {@link PrefetchableTextFilesFirehoseFactory} for more details.
+ */
+public abstract class Fetcher<T> implements Iterator<OpenedObject<T>>
+{
+  private static final Logger LOG = new Logger(Fetcher.class);
+  private static final String FETCH_FILE_PREFIX = "fetch-";
+  private final CacheManager<T> cacheManager;
+  private final List<T> objects;
+  private final ExecutorService fetchExecutor;
+
+  @Nullable
+  private final File temporaryDirectory;
+
+  private final boolean prefetchEnabled;
+
+  private final LinkedBlockingQueue<FetchedFile<T>> fetchedFiles = new LinkedBlockingQueue<>();
+
+  // Number of bytes of current fetched files.
+  // This is updated when a file is successfully fetched, a fetched file is deleted, or a fetched file is
+  // cached.
+  private final AtomicLong fetchedBytes = new AtomicLong(0);
+  private Future<Void> fetchFuture;
+  private PrefetchConfig prefetchConfig;
+
+  // nextFetchIndex indicates which object should be downloaded when fetch is triggered.
+  // This variable is always read by the same thread regardless of whether prefetch is enabled.
+  private int nextFetchIndex;
+
+  private int numRemainingObjects;
+
+  Fetcher(
+      CacheManager<T> cacheManager,
+      List<T> objects,
+      ExecutorService fetchExecutor,
+      @Nullable File temporaryDirectory,
+      PrefetchConfig prefetchConfig
+  )
+  {
+    this.cacheManager = cacheManager;
+    this.objects = objects;
+    this.fetchExecutor = fetchExecutor;
+    this.temporaryDirectory = temporaryDirectory;
+    this.prefetchConfig = prefetchConfig;
+    this.prefetchEnabled = prefetchConfig.getMaxFetchCapacityBytes() > 0;
+    this.numRemainingObjects = objects.size();
+
+    // (*) If the cache is initialized, put all cached files into the queue.
+    this.fetchedFiles.addAll(cacheManager.getFiles());
+    this.nextFetchIndex = fetchedFiles.size();
+    if (cacheManager.isEnabled() || prefetchEnabled) {
+      Preconditions.checkNotNull(temporaryDirectory, "temporaryDirectory");
+    }
+    if (prefetchEnabled) {
+      fetchIfNeeded(0L);
+    }
+  }
+
+  /**
+   * Submit a fetch task if remainingBytes is smaller than prefetchTriggerBytes.
+   */
+  private void fetchIfNeeded(long remainingBytes)
+  {
+    if ((fetchFuture == null || fetchFuture.isDone())
+        && remainingBytes <= prefetchConfig.getPrefetchTriggerBytes()) {
+      fetchFuture = fetchExecutor.submit(() -> {
+        fetch();
+        return null;
+      });
+    }
+  }
+
+  /**
+   * Fetch objects to a local disk up to {@link PrefetchConfig#maxFetchCapacityBytes}.
+   * This method is not thread safe and must be called by a single thread.  Note that even if
+   * {@link PrefetchConfig#maxFetchCapacityBytes} is 0, at least one file is always fetched.
+   * This simplifies the design, and should be improved once our client implementations for cloud storages
+   * like S3 support range scan.
+   * <p>
+   * This method is called by {@link #fetchExecutor} if prefetch is enabled.  Otherwise, it is called by the same
+   * thread.
+   */
+  private void fetch() throws Exception
+  {
+    for (; nextFetchIndex < objects.size()
+           && fetchedBytes.get() <= prefetchConfig.getMaxFetchCapacityBytes(); nextFetchIndex++) {
+      final T object = objects.get(nextFetchIndex);
+      LOG.info("Fetching [%d]th object[%s], fetchedBytes[%d]", nextFetchIndex, object, fetchedBytes.get());
+      final File outFile = File.createTempFile(FETCH_FILE_PREFIX, null, temporaryDirectory);
+      fetchedBytes.addAndGet(download(object, outFile));
+      fetchedFiles.put(new FetchedFile<>(object, outFile, getFileCloser(outFile, fetchedBytes)));
+    }
+  }
+
+  /**
+   * Downloads an object into a file. The download process could be retried depending on the object source.
+   *
+   * @param object  an object to be downloaded
+   * @param outFile the file into which the object data is stored
+   *
+   * @return number of downloaded bytes
+   */
+  protected abstract long download(T object, File outFile) throws IOException;
+
+  /**
+   * Generates an instance of {@link OpenedObject} for the given object.
+   */
+  protected abstract OpenedObject<T> generateOpenObject(T object) throws IOException;
+
+  @Override
+  public boolean hasNext()
+  {
+    return numRemainingObjects > 0;
+  }
+
+  @Override
+  public OpenedObject<T> next()
+  {
+    if (!hasNext()) {
+      throw new NoSuchElementException();
+    }
+
+    // If fetch() fails, hasNext() always returns true and next() is always called. The below method checks whether
+    // fetch() threw an exception and propagates it if one exists.
+    checkFetchException(false);
+
+    try {
+      final OpenedObject<T> openedObject = prefetchEnabled ? openObjectFromLocal() : openObjectFromRemote();
+      numRemainingObjects--;
+      return openedObject;
+    }
+    catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private void checkFetchException(boolean wait)
+  {
+    try {
+      if (wait) {
+        fetchFuture.get(prefetchConfig.getFetchTimeout(), TimeUnit.MILLISECONDS);
+        fetchFuture = null;
+      } else if (fetchFuture != null && fetchFuture.isDone()) {
+        fetchFuture.get();
+        fetchFuture = null;
+      }
+    }
+    catch (InterruptedException | ExecutionException e) {
+      throw new RuntimeException(e);
+    }
+    catch (TimeoutException e) {
+      throw new ISE(e, "Failed to fetch, but cannot check the reason in [%d] ms", prefetchConfig.getFetchTimeout());
+    }
+  }
+
+  private OpenedObject<T> openObjectFromLocal() throws IOException
+  {
+    final FetchedFile<T> fetchedFile;
+
+    if (!fetchedFiles.isEmpty()) {
+      // If there are already fetched files, use them
+      fetchedFile = fetchedFiles.poll();
+    } else {
+      // Otherwise, wait for fetching
+      try {
+        fetchIfNeeded(fetchedBytes.get());
+        fetchedFile = fetchedFiles.poll(prefetchConfig.getFetchTimeout(), TimeUnit.MILLISECONDS);
+        if (fetchedFile == null) {
+          // Check whether the latest fetch failed and propagate its exception if so
+          checkFetchException(true);
+          // Otherwise, throw a timeout exception
+          throw new RuntimeException(new TimeoutException());
+        }
+      }
+      catch (InterruptedException e) {
+        throw Throwables.propagate(e);
+      }
+    }
+    final FetchedFile<T> maybeCached = cacheIfPossible(fetchedFile);
+    // trigger fetch again for subsequent next() calls
+    fetchIfNeeded(fetchedBytes.get());
+    return new OpenedObject<>(maybeCached);
+  }
+
+  private OpenedObject<T> openObjectFromRemote() throws IOException
+  {
+    if (fetchedFiles.size() > 0) {
+      // If fetchedFiles is not empty even though prefetching is disabled, they should be cached files.
+      // We use them first. See (*).
+      return new OpenedObject<>(fetchedFiles.poll());
+    } else if (cacheManager.cacheable()) {
+      // If cache is enabled, first download an object to local storage and cache it.
+      try {
+        // Since maxFetchCapacityBytes is 0, at most one file is fetched.
+        fetch();
+        FetchedFile<T> fetchedFile = fetchedFiles.poll();
+        if (fetchedFile == null) {
+          throw new ISE("Cannot fetch object[%s]", objects.get(nextFetchIndex - 1));
+        }
+        final FetchedFile<T> cached = cacheIfPossible(fetchedFile);
+        return new OpenedObject<>(cached);
+      }
+      catch (Exception e) {
+        throw Throwables.propagate(e);
+      }
+    } else {
+      final T object = objects.get(nextFetchIndex);
+      LOG.info("Reading [%d]th object[%s]", nextFetchIndex, object);
+      nextFetchIndex++;
+      return generateOpenObject(object);
+    }
+  }
+
+  private FetchedFile<T> cacheIfPossible(FetchedFile<T> fetchedFile)
+  {
+    if (cacheManager.cacheable()) {
+      final FetchedFile<T> cachedFile = cacheManager.cache(fetchedFile);
+      // If the fetchedFile is cached, make room for fetching more data immediately.
+      // This is because cache space and fetch space are separated.
+      fetchedBytes.addAndGet(-fetchedFile.length());
+      return cachedFile;
+    } else {
+      return fetchedFile;
+    }
+  }
+
+  private static Closeable getFileCloser(
+      final File file,
+      final AtomicLong fetchedBytes
+  )
+  {
+    return () -> {
+      final long fileSize = file.length();
+      file.delete();
+      fetchedBytes.addAndGet(-fileSize);
+    };
+  }
+}
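
A hedged sketch of the two abstract hooks, as a trivial subclass that "downloads" by copying
local files. It assumes same-package access to the package-private constructor and
OpenedObject; a real implementation would pull from remote storage instead:

    package org.apache.druid.data.input.impl.prefetch;

    import org.apache.commons.io.FileUtils;

    import java.io.File;
    import java.io.IOException;
    import java.util.List;
    import java.util.concurrent.ExecutorService;

    class LocalCopyFetcher extends Fetcher<File>
    {
      LocalCopyFetcher(
          CacheManager<File> cacheManager,
          List<File> files,
          ExecutorService fetchExecutor,
          File temporaryDirectory,
          PrefetchConfig prefetchConfig
      )
      {
        super(cacheManager, files, fetchExecutor, temporaryDirectory, prefetchConfig);
      }

      @Override
      protected long download(File object, File outFile) throws IOException
      {
        // "Download" by copying the source file into the fetch directory.
        FileUtils.copyFile(object, outFile);
        return outFile.length();
      }

      @Override
      protected OpenedObject<File> generateOpenObject(File object) throws IOException
      {
        // No fetch: stream the object directly and release nothing on close.
        return new OpenedObject<>(object, FileUtils.openInputStream(object), () -> {});
      }
    }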
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/FileFetcher.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/FileFetcher.java
new file mode 100644
index 00000000000..ac31aeb4cef
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/FileFetcher.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import com.google.common.base.Predicate;
+import org.apache.commons.io.IOUtils;
+import org.apache.druid.java.util.common.RetryUtils;
+import org.apache.druid.java.util.common.StringUtils;
+
+import javax.annotation.Nullable;
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+
+/**
+ * A file fetcher used by {@link PrefetchableTextFilesFirehoseFactory}.
+ * See the javadoc of {@link PrefetchableTextFilesFirehoseFactory} for more details.
+ */
+public class FileFetcher<T> extends Fetcher<T>
+{
+  private static final int BUFFER_SIZE = 1024 * 4;
+  private final ObjectOpenFunction<T> openObjectFunction;
+  private final Predicate<Throwable> retryCondition;
+  private final byte[] buffer;
+  // maximum retry for fetching an object from the remote site
+  private final int maxFetchRetry;
+
+  public int getMaxFetchRetry()
+  {
+    return maxFetchRetry;
+  }
+
+  FileFetcher(
+      CacheManager<T> cacheManager,
+      List<T> objects,
+      ExecutorService fetchExecutor,
+      @Nullable File temporaryDirectory,
+      PrefetchConfig prefetchConfig,
+      ObjectOpenFunction<T> openObjectFunction,
+      Predicate<Throwable> retryCondition,
+      Integer maxFetchRetries
+  )
+  {
+
+    super(
+        cacheManager,
+        objects,
+        fetchExecutor,
+        temporaryDirectory,
+        prefetchConfig
+    );
+
+    this.openObjectFunction = openObjectFunction;
+    this.retryCondition = retryCondition;
+    this.buffer = new byte[BUFFER_SIZE];
+    this.maxFetchRetry = maxFetchRetries;
+  }
+
+  /**
+   * Downloads an object. It retries up to {@link #maxFetchRetry} times (i.e., at most
+   * maxFetchRetry + 1 attempts) and throws an IOException once all attempts fail.
+   *
+   * @param object  an object to be downloaded
+   * @param outFile the file into which the object data is stored
+   *
+   * @return number of downloaded bytes
+   */
+  @Override
+  protected long download(T object, File outFile) throws IOException
+  {
+    try {
+      return RetryUtils.retry(
+          () -> {
+            try (final InputStream is = openObjectFunction.open(object);
+                 final OutputStream os = new FileOutputStream(outFile)) {
+              return IOUtils.copyLarge(is, os, buffer);
+            }
+          },
+          retryCondition,
+          outFile::delete,
+          maxFetchRetry + 1,
+          StringUtils.format("Failed to download object[%s]", object)
+      );
+    }
+    catch (Exception e) {
+      throw new IOException(e);
+    }
+  }
+
+  /**
+   * Generates an instance of {@link OpenedObject} for which the underlying stream may be re-opened and retried
+   * based on the exception and retry condition.
+   */
+  @Override
+  protected OpenedObject<T> generateOpenObject(T object) throws IOException
+  {
+    return new OpenedObject<>(
+        object,
+        new RetryingInputStream<>(object, openObjectFunction, retryCondition, getMaxFetchRetry()),
+        getNoopCloser()
+    );
+  }
+
+  private static Closeable getNoopCloser()
+  {
+    return () -> {
+    };
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/JsonIterator.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/JsonIterator.java
new file mode 100644
index 00000000000..142938b37e3
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/JsonIterator.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.druid.data.input.impl.prefetch;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.core.ObjectCodec;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.java.util.common.guava.CloseQuietly;
+import org.apache.druid.java.util.common.io.Closer;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+/**
+ * An iterator over an array of JSON objects. Uses {@link ObjectCodec} to deserialize them into regular Java objects.
+ *
+ * @param <T> the type of object returned by this iterator
+ */
+public class JsonIterator<T> implements Iterator<T>, Closeable
+{
+  private JsonParser jp;
+  private ObjectCodec objectCodec;
+  private final TypeReference typeRef;
+  private final InputStream inputStream;
+  private final Closeable resourceCloser;
+  private final ObjectMapper objectMapper;
+
+  /**
+   * @param typeRef        the object type that the JSON object should be deserialized into
+   * @param inputStream    stream containing an array of JSON objects
+   * @param resourceCloser a {@code Closeable} implementation to release resources that the object is holding
+   * @param objectMapper   object mapper, used for deserialization
+   */
+  public JsonIterator(
+      TypeReference typeRef,
+      InputStream inputStream,
+      Closeable resourceCloser,
+      ObjectMapper objectMapper
+  )
+  {
+    this.typeRef = typeRef;
+    this.inputStream = inputStream;
+    this.resourceCloser = resourceCloser;
+    this.objectMapper = objectMapper;
+    init();
+  }
+
+  /**
+   * Returns {@code true} if there are more objects to be read.
+   *
+   * @return {@code true} if there are more objects to be read, else {@code false}
+   */
+  @Override
+  public boolean hasNext()
+  {
+    if (jp.isClosed()) {
+      return false;
+    }
+    if (jp.getCurrentToken() == JsonToken.END_ARRAY) {
+      CloseQuietly.close(jp);
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * Retrieves the next deserialized object from the stream of JSON objects.
+   *
+   * @return the next deserialized object from the stream of JSON objects
+   */
+  @Override
+  public T next()
+  {
+    if (!hasNext()) {
+      throw new NoSuchElementException("No more objects to read!");
+    }
+    try {
+      final T retVal = objectCodec.readValue(jp, typeRef);
+      jp.nextToken();
+      return retVal;
+    }
+    catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private void init()
+  {
+    try {
+      if (inputStream == null) {
+        throw new UnsupportedOperationException();
+      } else {
+        jp = objectMapper.getFactory().createParser(inputStream);
+      }
+      final JsonToken nextToken = jp.nextToken();
+      if (nextToken != JsonToken.START_ARRAY) {
+        throw new IAE("First token should be START_ARRAY, but it was [%s]", nextToken);
+      } else {
+        jp.nextToken();
+        objectCodec = jp.getCodec();
+      }
+    }
+    catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void close() throws IOException
+  {
+    Closer closer = Closer.create();
+    if (jp != null) {
+      closer.register(jp);
+    }
+    closer.register(resourceCloser);
+    closer.close();
+  }
+}
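
For illustration, a hedged sketch iterating over an in-memory JSON array, standing in for
the result-set files that PrefetchSqlFirehoseFactory spools to disk:

    package org.apache.druid.data.input.impl.prefetch;

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.Map;

    public class JsonIteratorExample
    {
      public static void main(String[] args) throws Exception
      {
        InputStream in = new ByteArrayInputStream(
            "[{\"x\": 1}, {\"x\": 2}]".getBytes(StandardCharsets.UTF_8)
        );

        try (JsonIterator<Map<String, Object>> it = new JsonIterator<>(
            new TypeReference<Map<String, Object>>() {},
            in,
            () -> {}, // nothing extra to release in this sketch
            new ObjectMapper()
        )) {
          while (it.hasNext()) {
            System.out.println(it.next()); // {x=1}, then {x=2}
          }
        }
      }
    }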
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/ObjectOpenFunction.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/ObjectOpenFunction.java
new file mode 100644
index 00000000000..730a9baffc0
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/ObjectOpenFunction.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+
+interface ObjectOpenFunction<T>
+{
+  InputStream open(T object) throws IOException;
+
+  default InputStream open(T object, long start) throws IOException
+  {
+    return open(object);
+  }
+
+  default InputStream open(T object, File outFile) throws IOException
+  {
+    return open(object);
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/OpenedObject.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/OpenedObject.java
new file mode 100644
index 00000000000..fc0f64cdeb7
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/OpenedObject.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import org.apache.commons.io.FileUtils;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * A class containing meta information about an opened object.  This class is used to put related objects together.  It
+ * contains an original object, an objectStream from the object, and a resourceCloser which knows how to release
+ * associated resources on closing.
+ *
+ * {@link PrefetchableTextFilesFirehoseFactory.ResourceCloseableLineIterator} consumes the objectStream and closes
+ * it with the resourceCloser.
+ */
+class OpenedObject<T>
+{
+  // Original object
+  private final T object;
+  // Input stream from the object
+  private final InputStream objectStream;
+  // Closer which is called when the file is not needed anymore. Usually this deletes the file except for cached files.
+  private final Closeable resourceCloser;
+
+  OpenedObject(FetchedFile<T> fetchedFile) throws IOException
+  {
+    this(fetchedFile.getObject(), FileUtils.openInputStream(fetchedFile.getFile()), fetchedFile.getResourceCloser());
+  }
+
+  OpenedObject(T object, InputStream objectStream, Closeable resourceCloser)
+  {
+    this.object = object;
+    this.objectStream = objectStream;
+    this.resourceCloser = resourceCloser;
+  }
+
+  T getObject()
+  {
+    return object;
+  }
+
+  InputStream getObjectStream()
+  {
+    return objectStream;
+  }
+
+  Closeable getResourceCloser()
+  {
+    return resourceCloser;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchConfig.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchConfig.java
new file mode 100644
index 00000000000..354aeb9cf55
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchConfig.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.druid.data.input.impl.prefetch;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Holds the essential configuration required by {@link Fetcher} for prefetching purposes.
+ */
+public class PrefetchConfig
+{
+  public static final long DEFAULT_MAX_CACHE_CAPACITY_BYTES = 1024 * 1024 * 1024; // 1GB
+  public static final long DEFAULT_MAX_FETCH_CAPACITY_BYTES = 1024 * 1024 * 1024; // 1GB
+  public static final long DEFAULT_FETCH_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(60);
+
+  // A rough upper bound on the total size of fetched objects; the actual fetched size can be bigger. The reason is our
+  // current client implementations for cloud storages like s3 don't support range scan yet, so we must download the
+  // whole file at once. It would still be possible to keep the cached/fetched size under these limits by estimating the
+  // after-fetch size, but that would force us to handle the case where some files can never be fetched due to their
+  // large size, which would complicate the implementation.
+  private final long maxFetchCapacityBytes;
+
+  private final long maxCacheCapacityBytes;
+
+  private final long prefetchTriggerBytes;
+
+  // timeout for fetching an object from the remote site
+  private final long fetchTimeout;
+
+
+  public PrefetchConfig(
+      Long maxCacheCapacityBytes,
+      Long maxFetchCapacityBytes,
+      Long prefetchTriggerBytes,
+      Long fetchTimeout
+  )
+  {
+    this.maxCacheCapacityBytes = maxCacheCapacityBytes == null
+                                 ? DEFAULT_MAX_CACHE_CAPACITY_BYTES
+                                 : maxCacheCapacityBytes;
+    this.maxFetchCapacityBytes = maxFetchCapacityBytes == null
+                                 ? DEFAULT_MAX_FETCH_CAPACITY_BYTES
+                                 : maxFetchCapacityBytes;
+    this.prefetchTriggerBytes = prefetchTriggerBytes == null
+                                ? this.maxFetchCapacityBytes / 2
+                                : prefetchTriggerBytes;
+    this.fetchTimeout = fetchTimeout == null ? DEFAULT_FETCH_TIMEOUT_MS : fetchTimeout;
+  }
+
+  public long getMaxCacheCapacityBytes()
+  {
+    return maxCacheCapacityBytes;
+  }
+
+  public long getMaxFetchCapacityBytes()
+  {
+    return maxFetchCapacityBytes;
+  }
+
+  public long getPrefetchTriggerBytes()
+  {
+    return prefetchTriggerBytes;
+  }
+
+  public long getFetchTimeout()
+  {
+    return fetchTimeout;
+  }
+
+}
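
A hedged sketch of how null constructor arguments resolve to the defaults above:

    import org.apache.druid.data.input.impl.prefetch.PrefetchConfig;

    public class PrefetchConfigExample
    {
      public static void main(String[] args)
      {
        PrefetchConfig config = new PrefetchConfig(null, null, null, null);
        System.out.println(config.getMaxCacheCapacityBytes()); // 1073741824 (1GB)
        System.out.println(config.getMaxFetchCapacityBytes()); // 1073741824 (1GB)
        System.out.println(config.getPrefetchTriggerBytes());  // 536870912 (maxFetch / 2)
        System.out.println(config.getFetchTimeout());          // 60000 (60 seconds)
      }
    }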
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchSqlFirehoseFactory.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchSqlFirehoseFactory.java
new file mode 100644
index 00000000000..4cf70b293c7
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchSqlFirehoseFactory.java
@@ -0,0 +1,253 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import org.apache.commons.io.LineIterator;
+import org.apache.druid.data.input.Firehose;
+import org.apache.druid.data.input.FirehoseFactory;
+import org.apache.druid.data.input.impl.InputRowParser;
+import org.apache.druid.data.input.impl.SqlFirehose;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.concurrent.Execs;
+import org.apache.druid.java.util.common.logger.Logger;
+
+import javax.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * PrefetchSqlFirehoseFactory is an abstract firehose factory for reading prefetched SQL result set data. Regardless
+ * of whether prefetching is enabled, for each SQL object the entire result set is fetched into a file on local disk.
+ * This class defines prefetching as caching the result sets on local disk in case multiple SQL queries are present.
+ * When prefetching is enabled, the following functionalities are provided:
+ * <p/>
+ * <p>
+ * - Caching: for the first call of {@link #connect(InputRowParser, File)}, it caches objects on local disk
+ * up to maxCacheCapacityBytes.  These caches are NOT deleted until the process terminates, and thus can be used for
+ * future reads.
+ * <br/>
+ * - Fetching: once it has read all cached data, it fetches the remaining objects onto local disk and reads data from
+ * them.  For performance reasons, prefetching is used: when the size of the remaining fetched data is
+ * smaller than {@link PrefetchConfig#prefetchTriggerBytes}, a background prefetch thread automatically starts to fetch the
+ * remaining objects.
+ * <br/>
+ * <p/>
+ * <p>
+ * This implementation aims to avoid maintaining a persistent connection to the database by prefetching the result set onto disk.
+ * <br/>
+ * Prefetching can be turned on/off by setting maxFetchCapacityBytes.  Depending on whether prefetching is enabled or
+ * disabled, the firehose behaves as follows.
+ * <p/>
+ * <p>
+ * 1. If prefetch is enabled, this firehose can fetch input objects in the background.
+ * <br/>
+ * 2. When next() is called, it first checks whether there are already fetched files in local storage.
+ * <br/>
+ * 2.1 If there are, it simply chooses a fetched file and returns a {@link LineIterator} reading that file.
+ * <br/>
+ * 2.2 If there are no fetched files in local storage but some objects still remain to be read, the firehose
+ * immediately fetches one of the input objects in the background. Finally, the firehose returns a {@link JsonIterator}
+ * for deserializing the saved result set.
+ * <br/>
+ * 3. If prefetch is disabled, the firehose saves the result set to a file and returns a {@link JsonIterator}
+ * which directly reads the stream opened by {@link #openObjectStream}. If an IOException occurs, it is thrown
+ * and the read fails.
+ */
+public abstract class PrefetchSqlFirehoseFactory<T>
+    implements FirehoseFactory<InputRowParser<Map<String, Object>>>
+{
+  private static final Logger LOG = new Logger(PrefetchSqlFirehoseFactory.class);
+
+  private final PrefetchConfig prefetchConfig;
+  private final CacheManager<T> cacheManager;
+  private List<T> objects;
+  private ObjectMapper objectMapper;
+
+
+  public PrefetchSqlFirehoseFactory(
+      Long maxCacheCapacityBytes,
+      Long maxFetchCapacityBytes,
+      Long prefetchTriggerBytes,
+      Long fetchTimeout,
+      ObjectMapper objectMapper
+  )
+  {
+    this.prefetchConfig = new PrefetchConfig(
+        maxCacheCapacityBytes,
+        maxFetchCapacityBytes,
+        prefetchTriggerBytes,
+        fetchTimeout
+    );
+    this.cacheManager = new CacheManager<>(
+        prefetchConfig.getMaxCacheCapacityBytes()
+    );
+    this.objectMapper = objectMapper;
+  }
+
+  @JsonProperty
+  public long getMaxCacheCapacityBytes()
+  {
+    return cacheManager.getMaxCacheCapacityBytes();
+  }
+
+  @JsonProperty
+  public long getMaxFetchCapacityBytes()
+  {
+    return prefetchConfig.getMaxFetchCapacityBytes();
+  }
+
+  @JsonProperty
+  public long getPrefetchTriggerBytes()
+  {
+    return prefetchConfig.getPrefetchTriggerBytes();
+  }
+
+  @JsonProperty
+  public long getFetchTimeout()
+  {
+    return prefetchConfig.getFetchTimeout();
+  }
+
+  @VisibleForTesting
+  CacheManager<T> getCacheManager()
+  {
+    return cacheManager;
+  }
+
+  @Override
+  public Firehose connect(InputRowParser<Map<String, Object>> firehoseParser, @Nullable File temporaryDirectory)
+      throws IOException
+  {
+    if (objects == null) {
+      objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));
+    }
+    if (cacheManager.isEnabled() || prefetchConfig.getMaxFetchCapacityBytes() > 0) {
+      Preconditions.checkNotNull(temporaryDirectory, "temporaryDirectory");
+      Preconditions.checkArgument(
+          temporaryDirectory.exists(),
+          "temporaryDirectory[%s] does not exist",
+          temporaryDirectory
+      );
+      Preconditions.checkArgument(
+          temporaryDirectory.isDirectory(),
+          "temporaryDirectory[%s] is not a directory",
+          temporaryDirectory
+      );
+    }
+
+    LOG.info("Create a new firehose for [%d] queries", objects.size());
+
+    // fetchExecutor is responsible for background data fetching
+    final ExecutorService fetchExecutor = Execs.singleThreaded("firehose_fetch_%d");
+    final Fetcher<T> fetcher = new SqlFetcher<>(
+        cacheManager,
+        objects,
+        fetchExecutor,
+        temporaryDirectory,
+        prefetchConfig,
+        new ObjectOpenFunction<T>()
+        {
+          @Override
+          public InputStream open(T object, File outFile) throws IOException
+          {
+            return openObjectStream(object, outFile);
+          }
+
+          @Override
+          public InputStream open(T object) throws IOException
+          {
+            final File outFile = File.createTempFile("sqlresults_", null, temporaryDirectory);
+            return openObjectStream(object, outFile);
+          }
+        }
+    );
+
+    return new SqlFirehose(
+        new Iterator<JsonIterator<Map<String, Object>>>()
+        {
+          @Override
+          public boolean hasNext()
+          {
+            return fetcher.hasNext();
+          }
+
+          @Override
+          public JsonIterator<Map<String, Object>> next()
+          {
+            if (!hasNext()) {
+              throw new NoSuchElementException();
+            }
+            try {
+              TypeReference<Map<String, Object>> type = new TypeReference<Map<String, Object>>()
+              {
+              };
+              final OpenedObject<T> openedObject = fetcher.next();
+              final InputStream stream = openedObject.getObjectStream();
+              return new JsonIterator<>(type, stream, openedObject.getResourceCloser(), objectMapper);
+            }
+            catch (Exception ioe) {
+              throw new RuntimeException(ioe);
+            }
+          }
+        },
+        firehoseParser,
+        () -> {
+          fetchExecutor.shutdownNow();
+          try {
+            Preconditions.checkState(fetchExecutor.awaitTermination(
+                prefetchConfig.getFetchTimeout(),
+                TimeUnit.MILLISECONDS
+            ));
+          }
+          catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new ISE("Failed to shutdown fetch executor during close");
+          }
+        }
+    );
+  }
+
+  /**
+   * Open an input stream from the given object.  The object is fetched into the file and an input
+   * stream to the file is provided.
+   *
+   * @param object   an object to be read
+   * @param filename the file into which the object is fetched
+   *
+   * @return an input stream to the file
+   */
+  protected abstract InputStream openObjectStream(T object, File filename) throws IOException;
+
+  protected abstract Collection<T> initObjects();
+}
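
A hedged sketch of a concrete subclass wiring the two abstract methods together. Here
runQuery() is a hypothetical stand-in for a real JDBC call; the only contract shown is that
each object's result set is written to the given file as a JSON array, which is what
JsonIterator expects to read back:

    package org.apache.druid.data.input.impl.prefetch;

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.google.common.collect.ImmutableMap;

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Collection;
    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    public class SketchSqlFirehoseFactory extends PrefetchSqlFirehoseFactory<String>
    {
      private final ObjectMapper mapper;

      public SketchSqlFirehoseFactory(ObjectMapper mapper)
      {
        super(null, null, null, null, mapper); // defaults for all capacities
        this.mapper = mapper;
      }

      @Override
      protected InputStream openObjectStream(String query, File filename) throws IOException
      {
        // Execute the query and spool all rows to disk as a JSON array.
        mapper.writeValue(filename, runQuery(query));
        return new FileInputStream(filename);
      }

      @Override
      protected Collection<String> initObjects()
      {
        return Collections.singletonList("SELECT 1"); // hypothetical query list
      }

      private List<Map<String, Object>> runQuery(String query)
      {
        // Hypothetical stand-in for a JDBC call; returns a fixed row.
        return Collections.<Map<String, Object>>singletonList(
            ImmutableMap.<String, Object>of("x", 1)
        );
      }
    }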
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java
new file mode 100644
index 00000000000..901b5d635c9
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java
@@ -0,0 +1,300 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Predicate;
+import com.google.common.collect.ImmutableList;
+import org.apache.commons.io.LineIterator;
+import org.apache.druid.data.input.Firehose;
+import org.apache.druid.data.input.impl.AbstractTextFilesFirehoseFactory;
+import org.apache.druid.data.input.impl.FileIteratingFirehose;
+import org.apache.druid.data.input.impl.StringInputRowParser;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.concurrent.Execs;
+import org.apache.druid.java.util.common.logger.Logger;
+
+import javax.annotation.Nullable;
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.nio.charset.StandardCharsets;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * PrefetchableTextFilesFirehoseFactory is an abstract firehose factory for reading text files.  The firehose returned
+ * by this class provides three key functionalities.
+ * <p/>
+ * <p>
+ * - Caching: for the first call of {@link #connect(StringInputRowParser, File)}, it caches objects on local disk
+ * up to maxCacheCapacityBytes.  These caches are NOT deleted until the process terminates, and thus can be used for
+ * future reads.
+ * <br/>
+ * - Fetching: once it has read all cached data, it fetches the remaining objects onto local disk and reads data from
+ * them.  For performance reasons, prefetching is used: when the size of the remaining fetched data is
+ * smaller than {@link PrefetchConfig#prefetchTriggerBytes}, a background prefetch thread automatically starts to fetch the
+ * remaining objects.
+ * <br/>
+ * - Retry: if an exception occurs while downloading an object, it retries again up to {@link #maxFetchRetry}.
+ * <p/>
+ * <p>
+ * This implementation can be useful when the cost of reading input objects is large, as when reading from AWS S3,
+ * because batch tasks like IndexTask or HadoopIndexTask can read the whole data twice for determining partition specs
+ * and generating segments if the intervals of GranularitySpec are not specified.
+ * <br/>
+ * Prefetching can be turned on/off by setting maxFetchCapacityBytes.  Depending on whether prefetching is enabled or
+ * disabled, the firehose behaves as follows.
+ * <p/>
+ * <p>
+ * 1. If prefetch is enabled, this firehose can fetch input objects in the background.
+ * <br/>
+ * 2. When next() is called, it first checks whether there are already fetched files in local storage.
+ * <br/>
+ * 2.1 If there are, it simply chooses a fetched file and returns a {@link LineIterator} reading that file.
+ * <br/>
+ * 2.2 If there are no fetched files in local storage but some objects still remain to be read, the firehose
+ * immediately fetches one of the input objects in the background. If an IOException occurs while downloading the object,
+ * it retries up to the maximum retry count. Finally, the firehose returns a {@link LineIterator} only when the
+ * download operation has finished successfully.
+ * <br/>
+ * 3. If prefetch is disabled, the firehose returns a {@link LineIterator} which directly reads the stream opened by
+ * {@link #openObjectStream}. If an IOException occurs, it is thrown and the read fails.
+ */
+public abstract class PrefetchableTextFilesFirehoseFactory<T>
+    extends AbstractTextFilesFirehoseFactory<T>
+{
+  private static final Logger LOG = new Logger(PrefetchableTextFilesFirehoseFactory.class);
+
+  public static final int DEFAULT_MAX_FETCH_RETRY = 3;
+
+  private final CacheManager<T> cacheManager;
+  private final PrefetchConfig prefetchConfig;
+
+  private List<T> objects;
+  private final int maxFetchRetry;
+
+  public PrefetchableTextFilesFirehoseFactory(
+      Long maxCacheCapacityBytes,
+      Long maxFetchCapacityBytes,
+      Long prefetchTriggerBytes,
+      Long fetchTimeout,
+      Integer maxFetchRetry
+  )
+  {
+    this.prefetchConfig = new PrefetchConfig(
+        maxCacheCapacityBytes,
+        maxFetchCapacityBytes,
+        prefetchTriggerBytes,
+        fetchTimeout
+    );
+    this.cacheManager = new CacheManager<>(
+        prefetchConfig.getMaxCacheCapacityBytes()
+    );
+    this.maxFetchRetry = maxFetchRetry == null ? DEFAULT_MAX_FETCH_RETRY : maxFetchRetry;
+  }
+
+  @JsonProperty
+  public long getMaxCacheCapacityBytes()
+  {
+    return cacheManager.getMaxCacheCapacityBytes();
+  }
+
+  @JsonProperty
+  public long getMaxFetchCapacityBytes()
+  {
+    return prefetchConfig.getMaxFetchCapacityBytes();
+  }
+
+  @JsonProperty
+  public long getPrefetchTriggerBytes()
+  {
+    return prefetchConfig.getPrefetchTriggerBytes();
+  }
+
+  @JsonProperty
+  public long getFetchTimeout()
+  {
+    return prefetchConfig.getFetchTimeout();
+  }
+
+  @JsonProperty
+  public int getMaxFetchRetry()
+  {
+    return maxFetchRetry;
+  }
+
+  @VisibleForTesting
+  CacheManager<T> getCacheManager()
+  {
+    return cacheManager;
+  }
+
+  @Override
+  public Firehose connect(StringInputRowParser firehoseParser, @Nullable File temporaryDirectory) throws IOException
+  {
+    if (objects == null) {
+      objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));
+    }
+
+    if (cacheManager.isEnabled() || prefetchConfig.getMaxFetchCapacityBytes() > 0) {
+      Preconditions.checkNotNull(temporaryDirectory, "temporaryDirectory");
+      Preconditions.checkArgument(
+          temporaryDirectory.exists(),
+          "temporaryDirectory[%s] does not exist",
+          temporaryDirectory
+      );
+      Preconditions.checkArgument(
+          temporaryDirectory.isDirectory(),
+          "temporaryDirectory[%s] is not a directory",
+          temporaryDirectory
+      );
+    }
+
+    LOG.info("Create a new firehose for [%d] objects", objects.size());
+
+    // fetchExecutor is responsible for background data fetching
+    final ExecutorService fetchExecutor = Execs.singleThreaded("firehose_fetch_%d");
+    final FileFetcher<T> fetcher = new FileFetcher<T>(
+        cacheManager,
+        objects,
+        fetchExecutor,
+        temporaryDirectory,
+        prefetchConfig,
+        new ObjectOpenFunction<T>()
+        {
+          @Override
+          public InputStream open(T object) throws IOException
+          {
+            return openObjectStream(object);
+          }
+
+          @Override
+          public InputStream open(T object, long start) throws IOException
+          {
+            return openObjectStream(object, start);
+          }
+        },
+        getRetryCondition(),
+        getMaxFetchRetry()
+    );
+
+    return new FileIteratingFirehose(
+        new Iterator<LineIterator>()
+        {
+          @Override
+          public boolean hasNext()
+          {
+            return fetcher.hasNext();
+          }
+
+          @Override
+          public LineIterator next()
+          {
+            if (!hasNext()) {
+              throw new NoSuchElementException();
+            }
+
+            final OpenedObject<T> openedObject = fetcher.next();
+            final InputStream stream;
+            try {
+              stream = wrapObjectStream(
+                  openedObject.getObject(),
+                  openedObject.getObjectStream()
+              );
+            }
+            catch (IOException e) {
+              throw new RuntimeException(e);
+            }
+
+            return new ResourceCloseableLineIterator(
+                new InputStreamReader(stream, StandardCharsets.UTF_8),
+                openedObject.getResourceCloser()
+            );
+          }
+        },
+        firehoseParser,
+        () -> {
+          fetchExecutor.shutdownNow();
+          try {
+            Preconditions.checkState(fetchExecutor.awaitTermination(
+                prefetchConfig.getFetchTimeout(),
+                TimeUnit.MILLISECONDS
+            ));
+          }
+          catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new ISE("Failed to shutdown fetch executor during close");
+          }
+        }
+    );
+  }
+
+  /**
+   * Returns a predicate describing retry conditions. {@link Fetcher} and {@link RetryingInputStream} will retry on the
+   * errors satisfying this condition.
+   */
+  protected abstract Predicate<Throwable> getRetryCondition();
+
+  /**
+   * Open an input stream from the given object.  If the object is compressed, this method should return the byte
+   * stream as-is, still compressed.  Decompression should be handled in {@link #wrapObjectStream(Object, InputStream)}.
+   *
+   * @param object an object to be read
+   * @param start  start offset
+   *
+   * @return an input stream for the object
+   */
+  protected abstract InputStream openObjectStream(T object, long start) throws IOException;
+
+  /**
+   * This class calls the {@link Closeable#close()} method of the resourceCloser when it is closed.
+   */
+  static class ResourceCloseableLineIterator extends LineIterator
+  {
+    private final Closeable resourceCloser;
+
+    ResourceCloseableLineIterator(Reader reader, Closeable resourceCloser) throws IllegalArgumentException
+    {
+      super(reader);
+      this.resourceCloser = resourceCloser;
+    }
+
+    @Override
+    public void close()
+    {
+      super.close();
+      try {
+        resourceCloser.close();
+      }
+      catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+}
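
A hedged sketch of a concrete subclass reading plain local text files. It assumes
AbstractTextFilesFirehoseFactory declares initObjects(), openObjectStream(T) and
wrapObjectStream(T, InputStream) as used below; the baseDir field is hypothetical:

    package org.apache.druid.data.input.impl.prefetch;

    import com.google.common.base.Predicate;

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Arrays;
    import java.util.Collection;

    public class LocalTextFilesFirehoseFactory extends PrefetchableTextFilesFirehoseFactory<File>
    {
      private final File baseDir;

      public LocalTextFilesFirehoseFactory(File baseDir)
      {
        super(null, null, null, null, null); // fall back to all defaults
        this.baseDir = baseDir;
      }

      @Override
      protected Collection<File> initObjects()
      {
        return Arrays.asList(baseDir.listFiles()); // assumes baseDir exists and is a directory
      }

      @Override
      protected InputStream openObjectStream(File object) throws IOException
      {
        return new FileInputStream(object);
      }

      @Override
      protected InputStream openObjectStream(File object, long start) throws IOException
      {
        final FileInputStream in = new FileInputStream(object);
        in.skip(start); // best-effort resume after a connection-reset style retry
        return in;
      }

      @Override
      protected InputStream wrapObjectStream(File object, InputStream stream)
      {
        return stream; // plain text, no decompression needed
      }

      @Override
      protected Predicate<Throwable> getRetryCondition()
      {
        return t -> t instanceof IOException; // retry only transient I/O failures
      }
    }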
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/RetryingInputStream.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/RetryingInputStream.java
new file mode 100644
index 00000000000..af401e9eb12
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/RetryingInputStream.java
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import com.google.common.base.Predicate;
+import com.google.common.base.Throwables;
+import com.google.common.io.CountingInputStream;
+import org.apache.druid.java.util.common.RetryUtils;
+import org.apache.druid.java.util.common.logger.Logger;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.SocketException;
+
+/**
+ * This class is used by {@link Fetcher} when prefetch is disabled. It is responsible for re-opening the underlying
+ * input stream for the input object on socket connection resets, as well as on errors satisfying the given
+ * {@link #retryCondition}.
+ *
+ * @param <T> object type
+ */
+class RetryingInputStream<T> extends InputStream
+{
+  private static final Logger log = new Logger(RetryingInputStream.class);
+
+  private final T object;
+  private final ObjectOpenFunction<T> objectOpenFunction;
+  private final Predicate<Throwable> retryCondition;
+  private final int maxRetry;
+
+  private CountingInputStream delegate;
+  private long startOffset;
+
+  RetryingInputStream(
+      T object,
+      ObjectOpenFunction<T> objectOpenFunction,
+      Predicate<Throwable> retryCondition,
+      int maxRetry
+  ) throws IOException
+  {
+    this.object = object;
+    this.objectOpenFunction = objectOpenFunction;
+    this.retryCondition = retryCondition;
+    this.maxRetry = maxRetry;
+    this.delegate = new CountingInputStream(objectOpenFunction.open(object));
+  }
+
+  private boolean isConnectionReset(Throwable t)
+  {
+    return (t instanceof SocketException && (t.getMessage() != null && t.getMessage().contains("Connection reset"))) ||
+           (t.getCause() != null && isConnectionReset(t.getCause()));
+  }
+
+  private void waitOrThrow(Throwable t, int nTry) throws IOException
+  {
+    final boolean isConnectionReset = isConnectionReset(t);
+    if (isConnectionReset || retryCondition.apply(t)) {
+      if (isConnectionReset) {
+        // Re-open the input stream on connection reset
+        startOffset += delegate.getCount();
+        try {
+          delegate.close();
+        }
+        catch (IOException e) {
+          // ignore this exception
+          log.warn(e, "Error while closing the delegate input stream");
+        }
+      }
+      try {
+        // Wait for the next try
+        RetryUtils.awaitNextRetry(t, null, nTry + 1, maxRetry, false);
+
+        if (isConnectionReset) {
+          log.info("retrying from offset[%d]", startOffset);
+          delegate = new CountingInputStream(objectOpenFunction.open(object, startOffset));
+        }
+      }
+      catch (InterruptedException | IOException e) {
+        t.addSuppressed(e);
+        throwAsIOException(t);
+      }
+    } else {
+      throwAsIOException(t);
+    }
+  }
+
+  private static void throwAsIOException(Throwable t) throws IOException
+  {
+    Throwables.propagateIfInstanceOf(t, IOException.class);
+    throw new IOException(t);
+  }
+
+  @Override
+  public int read() throws IOException
+  {
+    for (int nTry = 0; nTry < maxRetry; nTry++) {
+      try {
+        return delegate.read();
+      }
+      catch (Throwable t) {
+        waitOrThrow(t, nTry);
+      }
+    }
+    return delegate.read();
+  }
+
+  @Override
+  public int read(byte[] b) throws IOException
+  {
+    for (int nTry = 0; nTry < maxRetry; nTry++) {
+      try {
+        return delegate.read(b);
+      }
+      catch (Throwable t) {
+        waitOrThrow(t, nTry);
+      }
+    }
+    return delegate.read(b);
+  }
+
+  @Override
+  public int read(byte[] b, int off, int len) throws IOException
+  {
+    for (int nTry = 0; nTry < maxRetry; nTry++) {
+      try {
+        return delegate.read(b, off, len);
+      }
+      catch (Throwable t) {
+        waitOrThrow(t, nTry);
+      }
+    }
+    return delegate.read(b, off, len);
+  }
+
+  @Override
+  public long skip(long n) throws IOException
+  {
+    for (int nTry = 0; nTry < maxRetry; nTry++) {
+      try {
+        return delegate.skip(n);
+      }
+      catch (Throwable t) {
+        waitOrThrow(t, nTry);
+      }
+    }
+    return delegate.skip(n);
+  }
+
+  @Override
+  public int available() throws IOException
+  {
+    for (int nTry = 0; nTry < maxRetry; nTry++) {
+      try {
+        return delegate.available();
+      }
+      catch (Throwable t) {
+        waitOrThrow(t, nTry);
+      }
+    }
+    return delegate.available();
+  }
+
+  @Override
+  public void close() throws IOException
+  {
+    delegate.close();
+  }
+}
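A minimal usage sketch, not from this PR. The anonymous ObjectOpenFunction below
is illustrative only, and its exact overload set (open(T), open(T, long),
open(T, File)) is inferred from the call sites visible in this diff.

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.net.SocketException;

    // Hypothetical open function over local files.
    ObjectOpenFunction<File> openFunction = new ObjectOpenFunction<File>()
    {
      @Override
      public InputStream open(File object) throws IOException
      {
        return new FileInputStream(object);
      }

      @Override
      public InputStream open(File object, long start) throws IOException
      {
        final FileInputStream in = new FileInputStream(object);
        in.skip(start);  // resume from the offset already consumed
        return in;
      }

      @Override
      public InputStream open(File object, File outFile) throws IOException
      {
        // For local files there is nothing to download; just reuse open().
        return open(object);
      }
    };

    // Re-open and resume up to 3 times on connection resets or SocketExceptions.
    // The constructor throws IOException, so callers must handle it.
    InputStream in = new RetryingInputStream<>(
        new File("/tmp/data.json"),
        openFunction,
        t -> t instanceof SocketException,
        3
    );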
diff --git a/api/src/main/java/org/apache/druid/data/input/impl/prefetch/SqlFetcher.java b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/SqlFetcher.java
new file mode 100644
index 00000000000..e59958c4577
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/data/input/impl/prefetch/SqlFetcher.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.impl.prefetch;
+
+import javax.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+
+
+/**
+ * A file fetcher used by {@link PrefetchSqlFirehoseFactory}.
+ * See the javadoc of {@link PrefetchSqlFirehoseFactory} for more details.
+ */
+public class SqlFetcher<T> extends Fetcher<T>
+{
+  private static final String FETCH_FILE_PREFIX = "sqlfetch-";
+
+  @Nullable
+  private final File temporaryDirectory;
+
+  private final ObjectOpenFunction<T> openObjectFunction;
+
+  SqlFetcher(
+      CacheManager<T> cacheManager,
+      List<T> objects,
+      ExecutorService fetchExecutor,
+      @Nullable File temporaryDirectory,
+      PrefetchConfig prefetchConfig,
+      ObjectOpenFunction<T> openObjectFunction
+  )
+  {
+    super(
+        cacheManager,
+        objects,
+        fetchExecutor,
+        temporaryDirectory,
+        prefetchConfig
+    );
+    this.temporaryDirectory = temporaryDirectory;
+    this.openObjectFunction = openObjectFunction;
+  }
+
+  /**
+   * Downloads the entire result set into a file. This avoids maintaining a
+   * persistent connection to the database. Retries are performed at the query execution layer.
+   *
+   * @param object  the SQL query for which the result set is to be downloaded
+   * @param outFile the file in which the object data is stored
+   *
+   * @return the size of the downloaded result set in bytes
+   */
+  @Override
+  protected long download(T object, File outFile) throws IOException
+  {
+    openObjectFunction.open(object, outFile);
+    return outFile.length();
+  }
+
+  /**
+   * Generates an instance of {@link OpenedObject} for the given object. This is usually called
+   * when prefetching is disabled. Retries are performed at the query execution layer.
+   */
+  @Override
+  protected OpenedObject<T> generateOpenObject(T object) throws IOException
+  {
+    final File outFile = File.createTempFile(FETCH_FILE_PREFIX, null, temporaryDirectory);
+    return new OpenedObject<>(
+        object,
+        openObjectFunction.open(object, outFile),
+        outFile::delete
+    );
+  }
+}
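The open(object, outFile) contract that SqlFetcher relies on, sketched with plain
JDBC. This is a hedged sketch only; dataSource and writeResultSetAsJson are
hypothetical placeholders, not names from this PR.

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    // Hypothetical open(object, outFile): run the query, spool the whole
    // result set to outFile, then return a stream over the spooled file.
    public InputStream open(String sql, File outFile) throws IOException
    {
      try (Connection conn = dataSource.getConnection();    // dataSource: assumed
           Statement stmt = conn.createStatement();
           ResultSet rs = stmt.executeQuery(sql);
           OutputStream out = new FileOutputStream(outFile)) {
        writeResultSetAsJson(rs, out);                      // serializer: assumed
      }
      catch (SQLException e) {
        throw new IOException(e);
      }
      return new FileInputStream(outFile);
    }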
diff --git a/api/src/main/java/org/apache/druid/guice/Binders.java b/api/src/main/java/org/apache/druid/guice/Binders.java
new file mode 100644
index 00000000000..6110607f1dc
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/Binders.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.Binder;
+import com.google.inject.Key;
+import com.google.inject.multibindings.MapBinder;
+import org.apache.druid.guice.annotations.PublicApi;
+import org.apache.druid.segment.loading.DataSegmentArchiver;
+import org.apache.druid.segment.loading.DataSegmentFinder;
+import org.apache.druid.segment.loading.DataSegmentKiller;
+import org.apache.druid.segment.loading.DataSegmentMover;
+import org.apache.druid.segment.loading.DataSegmentPusher;
+import org.apache.druid.tasklogs.TaskLogs;
+
+/**
+ */
+@PublicApi
+public class Binders
+{
+
+  public static MapBinder<String, DataSegmentKiller> dataSegmentKillerBinder(Binder binder)
+  {
+    return MapBinder.newMapBinder(binder, String.class, DataSegmentKiller.class);
+  }
+
+  public static MapBinder<String, DataSegmentMover> dataSegmentMoverBinder(Binder binder)
+  {
+    return MapBinder.newMapBinder(binder, String.class, DataSegmentMover.class);
+  }
+
+  public static MapBinder<String, DataSegmentArchiver> dataSegmentArchiverBinder(Binder binder)
+  {
+    return MapBinder.newMapBinder(binder, String.class, DataSegmentArchiver.class);
+  }
+
+  public static MapBinder<String, DataSegmentPusher> dataSegmentPusherBinder(Binder binder)
+  {
+    return PolyBind.optionBinder(binder, Key.get(DataSegmentPusher.class));
+  }
+
+  public static MapBinder<String, DataSegmentFinder> dataSegmentFinderBinder(Binder binder)
+  {
+    return PolyBind.optionBinder(binder, Key.get(DataSegmentFinder.class));
+  }
+
+  public static MapBinder<String, TaskLogs> taskLogsBinder(Binder binder)
+  {
+    return PolyBind.optionBinder(binder, Key.get(TaskLogs.class));
+  }
+}
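A minimal sketch of how an extension module typically uses one of these binders.
The "myStore" key and MyDataSegmentKiller are hypothetical.

    import com.google.inject.Binder;
    import com.google.inject.Module;

    public class MyStorageModule implements Module
    {
      @Override
      public void configure(Binder binder)
      {
        // Register a killer for segments stored under the "myStore" scheme.
        Binders.dataSegmentKillerBinder(binder)
               .addBinding("myStore")
               .to(MyDataSegmentKiller.class)
               .in(LazySingleton.class);
      }
    }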
diff --git a/api/src/main/java/org/apache/druid/guice/ConditionalMultibind.java b/api/src/main/java/org/apache/druid/guice/ConditionalMultibind.java
new file mode 100644
index 00000000000..7cba8225792
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/ConditionalMultibind.java
@@ -0,0 +1,247 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.common.base.Predicate;
+import com.google.inject.Binder;
+import com.google.inject.TypeLiteral;
+import com.google.inject.multibindings.Multibinder;
+import org.apache.druid.guice.annotations.PublicApi;
+
+import java.lang.annotation.Annotation;
+import java.util.Properties;
+
+/**
+ * Provides the ability to conditionally bind an item to a set. The condition is based on a property value set in
+ * runtime.properties.
+ *
+ * Usage example:
+ *
+ * ConditionalMultibind.create(props, binder, Animal.class)
+ *                     .addConditionBinding("animal.type", Predicates.equalTo("cat"), Cat.class)
+ *                     .addConditionBinding("animal.type", Predicates.equalTo("dog"), Dog.class);
+ *
+ * At binding time, this will check the value set for property "animal.type" in props. If the value is "cat", it will
+ * add a binding to Cat.class. If the value is "dog", it will add a binding to Dog.class.
+ *
+ * At injection time, you will get the items that satisfy their corresponding predicates by calling
+ * injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>(){}))
+ */
+@PublicApi
+public class ConditionalMultibind<T>
+{
+
+  /**
+   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
+   *
+   * @param properties the runtime properties.
+   * @param binder     the binder for the injector that is being configured.
+   * @param type       the type that will be injected.
+   * @param <T>        interface type.
+   *
+   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
+   */
+  public static <T> ConditionalMultibind<T> create(Properties properties, Binder binder, Class<T> type)
+  {
+    return new ConditionalMultibind<T>(properties, Multibinder.newSetBinder(binder, type));
+  }
+
+  /**
+   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
+   *
+   * @param properties     the runtime properties.
+   * @param binder         the binder for the injector that is being configured.
+   * @param type           the type that will be injected.
+   * @param <T>            interface type.
+   * @param annotationType the binding annotation.
+   *
+   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
+   */
+  public static <T> ConditionalMultibind<T> create(
+      Properties properties,
+      Binder binder,
+      Class<T> type,
+      Class<? extends Annotation> annotationType
+  )
+  {
+    return new ConditionalMultibind<T>(properties, Multibinder.newSetBinder(binder, type, annotationType));
+  }
+
+  /**
+   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
+   *
+   * @param properties the runtime properties.
+   * @param binder     the binder for the injector that is being configured.
+   * @param type       the type that will be injected.
+   * @param <T>        interface type.
+   *
+   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
+   */
+  public static <T> ConditionalMultibind<T> create(Properties properties, Binder binder, TypeLiteral<T> type)
+  {
+    return new ConditionalMultibind<T>(properties, Multibinder.newSetBinder(binder, type));
+  }
+
+  /**
+   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
+   *
+   * @param properties     the runtime properties.
+   * @param binder         the binder for the injector that is being configured.
+   * @param type           the type that will be injected.
+   * @param <T>            interface type.
+   * @param annotationType the binding annotation.
+   *
+   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
+   */
+  public static <T> ConditionalMultibind<T> create(
+      Properties properties,
+      Binder binder,
+      TypeLiteral<T> type,
+      Class<? extends Annotation> annotationType
+  )
+  {
+    return new ConditionalMultibind<T>(properties, Multibinder.newSetBinder(binder, type, annotationType));
+  }
+
+
+  private final Properties properties;
+  private final Multibinder<T> multibinder;
+
+  public ConditionalMultibind(Properties properties, Multibinder<T> multibinder)
+  {
+    this.properties = properties;
+    this.multibinder = multibinder;
+  }
+
+  /**
+   * Unconditionally bind target to the set.
+   *
+   * @param target the target class to which it adds a binding.
+   *
+   * @return self to support a continuous syntax for adding more conditional bindings.
+   */
+  public ConditionalMultibind<T> addBinding(Class<? extends T> target)
+  {
+    multibinder.addBinding().to(target);
+    return this;
+  }
+
+  /**
+   * Unconditionally bind target to the set.
+   *
+   * @param target the target instance to which it adds a binding.
+   *
+   * @return self to support a continuous syntax for adding more conditional bindings.
+   */
+  public ConditionalMultibind<T> addBinding(T target)
+  {
+    multibinder.addBinding().toInstance(target);
+    return this;
+  }
+
+  /**
+   * Unconditionally bind target to the set.
+   *
+   * @param target the target type to which it adds a binding.
+   *
+   * @return self to support a continuous syntax for adding more conditional bindings.
+   */
+  public ConditionalMultibind<T> addBinding(TypeLiteral<T> target)
+  {
+    multibinder.addBinding().to(target);
+    return this;
+  }
+
+  /**
+   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
+   *
+   * @param property  the property to inspect
+   * @param condition the predicate used to verify whether to add a binding to "target"
+   * @param target    the target class to which it adds a binding.
+   *
+   * @return self to support a continuous syntax for adding more conditional bindings.
+   */
+  public ConditionalMultibind<T> addConditionBinding(
+      String property,
+      Predicate<String> condition,
+      Class<? extends T> target
+  )
+  {
+    final String value = properties.getProperty(property);
+    if (value == null) {
+      return this;
+    }
+    if (condition.apply(value)) {
+      multibinder.addBinding().to(target);
+    }
+    return this;
+  }
+
+  /**
+   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
+   *
+   * @param property  the property to inspect
+   * @param condition the predicate used to verify whether to add a binding to "target"
+   * @param target    the target instance to which it adds a binding.
+   *
+   * @return self to support a continuous syntax for adding more conditional bindings.
+   */
+  public ConditionalMultibind<T> addConditionBinding(
+      String property,
+      Predicate<String> condition,
+      T target
+  )
+  {
+    final String value = properties.getProperty(property);
+    if (value == null) {
+      return this;
+    }
+    if (condition.apply(value)) {
+      multibinder.addBinding().toInstance(target);
+    }
+    return this;
+  }
+
+  /**
+   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
+   *
+   * @param property  the property to inspect
+   * @param condition the predicate used to verify whether to add a binding to "target"
+   * @param target    the target type to which it adds a binding.
+   *
+   * @return self to support a continuous syntax for adding more conditional bindings.
+   */
+  @PublicApi
+  public ConditionalMultibind<T> addConditionBinding(
+      String property,
+      Predicate<String> condition,
+      TypeLiteral<T> target
+  )
+  {
+    final String value = properties.getProperty(property);
+    if (value == null) {
+      return this;
+    }
+    if (condition.apply(value)) {
+      multibinder.addBinding().to(target);
+    }
+    return this;
+  }
+}
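Putting the class comment's example end to end, as a hedged sketch; Animal, Cat
and Dog are the hypothetical types from the javadoc above.

    import com.google.common.base.Predicates;
    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.google.inject.Key;
    import com.google.inject.TypeLiteral;
    import java.util.Properties;
    import java.util.Set;

    Properties props = new Properties();
    props.setProperty("animal.type", "cat");

    Injector injector = Guice.createInjector(
        binder -> ConditionalMultibind
            .create(props, binder, Animal.class)
            .addConditionBinding("animal.type", Predicates.equalTo("cat"), Cat.class)
            .addConditionBinding("animal.type", Predicates.equalTo("dog"), Dog.class)
    );

    // Only the "cat" predicate matched, so the injected set holds a single Cat.
    Set<Animal> animals = injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>() {}));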
diff --git a/api/src/main/java/org/apache/druid/guice/DruidGuiceExtensions.java b/api/src/main/java/org/apache/druid/guice/DruidGuiceExtensions.java
new file mode 100644
index 00000000000..f400644808c
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/DruidGuiceExtensions.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.Binder;
+import com.google.inject.Module;
+import org.apache.druid.guice.annotations.PublicApi;
+
+/**
+ */
+@PublicApi
+public class DruidGuiceExtensions implements Module
+{
+  @Override
+  public void configure(Binder binder)
+  {
+    binder.bindScope(LazySingleton.class, DruidScopes.SINGLETON);
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/guice/DruidScopes.java b/api/src/main/java/org/apache/druid/guice/DruidScopes.java
new file mode 100644
index 00000000000..e63b2426724
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/DruidScopes.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.Key;
+import com.google.inject.Provider;
+import com.google.inject.Scope;
+import com.google.inject.Scopes;
+import org.apache.druid.guice.annotations.PublicApi;
+
+/**
+ */
+@PublicApi
+public class DruidScopes
+{
+  public static final Scope SINGLETON = new Scope()
+  {
+    @Override
+    public <T> Provider<T> scope(Key<T> key, Provider<T> unscoped)
+    {
+      return Scopes.SINGLETON.scope(key, unscoped);
+    }
+
+    @Override
+    public String toString()
+    {
+      return "DruidScopes.SINGLETON";
+    }
+  };
+}
diff --git a/api/src/main/java/org/apache/druid/guice/Jerseys.java b/api/src/main/java/org/apache/druid/guice/Jerseys.java
new file mode 100644
index 00000000000..08f09bdd5bc
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/Jerseys.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.Binder;
+import com.google.inject.TypeLiteral;
+import com.google.inject.multibindings.Multibinder;
+import org.apache.druid.guice.annotations.JSR311Resource;
+import org.apache.druid.guice.annotations.PublicApi;
+
+/**
+ */
+@PublicApi
+public class Jerseys
+{
+  public static void addResource(Binder binder, Class<?> resourceClazz)
+  {
+    Multibinder.newSetBinder(binder, new TypeLiteral<Class<?>>(){}, JSR311Resource.class)
+               .addBinding()
+               .toInstance(resourceClazz);
+  }
+}
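For example, a module can expose a JAX-RS resource like this (a sketch;
StatusResource and MyApiModule are hypothetical names):

    import com.google.inject.Binder;
    import com.google.inject.Module;

    public class MyApiModule implements Module
    {
      @Override
      public void configure(Binder binder)
      {
        // StatusResource ends up in the Set<Class<?>> bound under @JSR311Resource.
        Jerseys.addResource(binder, StatusResource.class);
      }
    }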
diff --git a/api/src/main/java/org/apache/druid/guice/JsonConfigProvider.java b/api/src/main/java/org/apache/druid/guice/JsonConfigProvider.java
new file mode 100644
index 00000000000..4899908b432
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/JsonConfigProvider.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.common.base.Supplier;
+import com.google.common.base.Suppliers;
+import com.google.inject.Binder;
+import com.google.inject.Inject;
+import com.google.inject.Key;
+import com.google.inject.Provider;
+import com.google.inject.util.Types;
+import org.apache.druid.guice.annotations.PublicApi;
+
+import java.lang.annotation.Annotation;
+import java.lang.reflect.ParameterizedType;
+import java.util.Properties;
+
+
+/**
+ * Provides a singleton value of type {@code <T>} from {@code Properties} bound in Guice.
+ * <br/>
+ * <h3>Usage</h3>
+ * To install this provider, bind it in your Guice module, as shown below.
+ *
+ * <pre>
+ * JsonConfigProvider.bind(binder, "druid.server", DruidServerConfig.class);
+ * </pre>
+ * <br/>
+ * In the above case, {@code druid.server} should be a key found in the {@code Properties} bound elsewhere.
+ * The value of that key should directly relate to the fields in {@code DruidServerConfig.class}.
+ *
+ * <h3>Implementation</h3>
+ * <br/>
+ * The state of {@code <T>} is defined by the value of the property {@code propertyBase}.
+ * This value is a json structure, decoded via {@link JsonConfigurator#configurate(Properties, String, Class)}.
+ * <br/>
+ *
+ * An example might be if DruidServerConfig.class were
+ *
+ * <pre>
+ *   public class DruidServerConfig
+ *   {
+ *     @JsonProperty @NotNull public String hostname = null;
+ *     @JsonProperty @Min(1025) public int port = 8080;
+ *   }
+ * </pre>
+ *
+ * And your Properties object had in it
+ *
+ * <pre>
+ *   druid.server.hostname=0.0.0.0
+ *   druid.server.port=3333
+ * </pre>
+ *
+ * Then this would bind a singleton instance of a DruidServerConfig object with hostname = "0.0.0.0" and port = 3333.
+ *
+ * If the port weren't set in the properties, then the default of 8080 would be taken.  Essentially, it is the same as
+ * subtracting the "druid.server" prefix from the properties and building a Map which is then passed into
+ * ObjectMapper.convertValue()
+ *
+ * @param <T> type of config object to provide.
+ */
+@PublicApi
+public class JsonConfigProvider<T> implements Provider<Supplier<T>>
+{
+  @SuppressWarnings("unchecked")
+  public static <T> void bind(Binder binder, String propertyBase, Class<T> classToProvide)
+  {
+    bind(
+        binder,
+        propertyBase,
+        classToProvide,
+        Key.get(classToProvide),
+        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide))
+    );
+  }
+
+  @SuppressWarnings("unchecked")
+  public static <T> void bind(Binder binder, String propertyBase, Class<T> classToProvide, Annotation annotation)
+  {
+    bind(
+        binder,
+        propertyBase,
+        classToProvide,
+        Key.get(classToProvide, annotation),
+        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide), annotation)
+    );
+  }
+
+  @SuppressWarnings("unchecked")
+  public static <T> void bind(
+      Binder binder,
+      String propertyBase,
+      Class<T> classToProvide,
+      Class<? extends Annotation> annotation
+  )
+  {
+    bind(
+        binder,
+        propertyBase,
+        classToProvide,
+        Key.get(classToProvide, annotation),
+        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide), annotation)
+    );
+  }
+
+  @SuppressWarnings("unchecked")
+  public static <T> void bind(
+      Binder binder,
+      String propertyBase,
+      Class<T> clazz,
+      Key<T> instanceKey,
+      Key<Supplier<T>> supplierKey
+  )
+  {
+    binder.bind(supplierKey).toProvider(of(propertyBase, clazz)).in(LazySingleton.class);
+    binder.bind(instanceKey).toProvider(new SupplierProvider<>(supplierKey));
+  }
+
+  @SuppressWarnings("unchecked")
+  public static <T> void bindInstance(
+      Binder binder,
+      Key<T> bindKey,
+      T instance
+  )
+  {
+    binder.bind(bindKey).toInstance(instance);
+
+    final ParameterizedType supType = Types.newParameterizedType(Supplier.class, bindKey.getTypeLiteral().getType());
+    final Key supplierKey;
+
+    if (bindKey.getAnnotationType() != null) {
+      supplierKey = Key.get(supType, bindKey.getAnnotationType());
+    } else if (bindKey.getAnnotation() != null) {
+      supplierKey = Key.get(supType, bindKey.getAnnotation());
+    } else {
+      supplierKey = Key.get(supType);
+    }
+
+    binder.bind(supplierKey).toInstance(Suppliers.ofInstance(instance));
+  }
+
+  public static <T> JsonConfigProvider<T> of(String propertyBase, Class<T> classToProvide)
+  {
+    return new JsonConfigProvider<T>(propertyBase, classToProvide);
+  }
+
+  private final String propertyBase;
+  private final Class<T> classToProvide;
+
+  private Properties props;
+  private JsonConfigurator configurator;
+
+  private Supplier<T> retVal = null;
+
+  public JsonConfigProvider(
+      String propertyBase,
+      Class<T> classToProvide
+  )
+  {
+    this.propertyBase = propertyBase;
+    this.classToProvide = classToProvide;
+  }
+
+  @Inject
+  public void inject(
+      Properties props,
+      JsonConfigurator configurator
+  )
+  {
+    this.props = props;
+    this.configurator = configurator;
+  }
+
+  @Override
+  public Supplier<T> get()
+  {
+    if (retVal != null) {
+      return retVal;
+    }
+
+    try {
+      final T config = configurator.configurate(props, propertyBase, classToProvide);
+      retVal = Suppliers.ofInstance(config);
+    }
+    catch (RuntimeException e) {
+      // When a runtime exception gets thrown out, this provider will get called again if the object is asked for
+      // again. This will have the same failed result, because no parameters will have actually changed when it is
+      // called again. Guice will then report the same error multiple times, which is pretty annoying. Cache a null
+      // supplier and return that instead.  This is technically enforcing a singleton, but such is life.
+      retVal = Suppliers.ofInstance(null);
+      throw e;
+    }
+    return retVal;
+  }
+}
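A hedged sketch of both sides of the binding described above, reusing the
hypothetical DruidServerConfig from the class javadoc; ServerConfigModule and
ServerThing are illustrative names.

    import com.google.common.base.Supplier;
    import com.google.inject.Binder;
    import com.google.inject.Inject;
    import com.google.inject.Module;

    public class ServerConfigModule implements Module
    {
      @Override
      public void configure(Binder binder)
      {
        JsonConfigProvider.bind(binder, "druid.server", DruidServerConfig.class);
      }
    }

    public class ServerThing
    {
      private final Supplier<DruidServerConfig> configSupplier;

      @Inject
      public ServerThing(Supplier<DruidServerConfig> configSupplier)
      {
        // Both DruidServerConfig and Supplier<DruidServerConfig> are bound.
        this.configSupplier = configSupplier;
      }
    }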
diff --git a/api/src/main/java/org/apache/druid/guice/JsonConfigurator.java b/api/src/main/java/org/apache/druid/guice/JsonConfigurator.java
new file mode 100644
index 00000000000..d08bd701245
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/JsonConfigurator.java
@@ -0,0 +1,226 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.fasterxml.jackson.annotation.JacksonInject;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.introspect.AnnotatedField;
+import com.fasterxml.jackson.databind.introspect.BeanPropertyDefinition;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Function;
+import com.google.common.base.Strings;
+import com.google.common.base.Throwables;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.inject.Inject;
+import com.google.inject.ProvisionException;
+import com.google.inject.spi.Message;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.logger.Logger;
+
+import javax.validation.ConstraintViolation;
+import javax.validation.ElementKind;
+import javax.validation.Path;
+import javax.validation.Validator;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+/**
+ */
+public class JsonConfigurator
+{
+  private static final Logger log = new Logger(JsonConfigurator.class);
+
+  private final ObjectMapper jsonMapper;
+  private final Validator validator;
+
+  @Inject
+  public JsonConfigurator(
+      ObjectMapper jsonMapper,
+      Validator validator
+  )
+  {
+    this.jsonMapper = jsonMapper;
+    this.validator = validator;
+  }
+
+  public <T> T configurate(Properties props, String propertyPrefix, Class<T> clazz) throws ProvisionException
+  {
+    verifyClazzIsConfigurable(jsonMapper, clazz);
+
+    // Make it end with a period so we only include properties nested under the prefix.
+    final String propertyBase = propertyPrefix.endsWith(".") ? propertyPrefix : propertyPrefix + ".";
+
+    Map<String, Object> jsonMap = Maps.newHashMap();
+    for (String prop : props.stringPropertyNames()) {
+      if (prop.startsWith(propertyBase)) {
+        final String propValue = props.getProperty(prop);
+        Object value;
+        try {
+          // Jackson expects a plain String to be quoted, so quote the value unless it looks like a JSON object or array.
+          String modifiedPropValue = propValue;
+          if (!(modifiedPropValue.startsWith("[") || modifiedPropValue.startsWith("{"))) {
+            modifiedPropValue = jsonMapper.writeValueAsString(propValue);
+          }
+          value = jsonMapper.readValue(modifiedPropValue, Object.class);
+        }
+        catch (IOException e) {
+          log.info(e, "Unable to parse [%s]=[%s] as a json object, using as is.", prop, propValue);
+          value = propValue;
+        }
+        hieraricalPutValue(propertyPrefix, prop, prop.substring(propertyBase.length()), value, jsonMap);
+      }
+    }
+
+    final T config;
+    try {
+      config = jsonMapper.convertValue(jsonMap, clazz);
+    }
+    catch (IllegalArgumentException e) {
+      throw new ProvisionException(
+          StringUtils.format("Problem parsing object at prefix[%s]: %s.", propertyPrefix, e.getMessage()), e
+      );
+    }
+
+    final Set<ConstraintViolation<T>> violations = validator.validate(config);
+    if (!violations.isEmpty()) {
+      List<String> messages = Lists.newArrayList();
+
+      for (ConstraintViolation<T> violation : violations) {
+        StringBuilder path = new StringBuilder();
+        try {
+          Class<?> beanClazz = violation.getRootBeanClass();
+          final Iterator<Path.Node> iter = violation.getPropertyPath().iterator();
+          while (iter.hasNext()) {
+            Path.Node next = iter.next();
+            if (next.getKind() == ElementKind.PROPERTY) {
+              final String fieldName = next.getName();
+              final Field theField = beanClazz.getDeclaredField(fieldName);
+
+              if (theField.getAnnotation(JacksonInject.class) != null) {
+                path = new StringBuilder(StringUtils.format(" -- Injected field[%s] not bound!?", fieldName));
+                break;
+              }
+
+              JsonProperty annotation = theField.getAnnotation(JsonProperty.class);
+              final boolean noAnnotationValue = annotation == null || Strings.isNullOrEmpty(annotation.value());
+              final String pathPart = noAnnotationValue ? fieldName : annotation.value();
+              if (path.length() == 0) {
+                path.append(pathPart);
+              } else {
+                path.append(".").append(pathPart);
+              }
+            }
+          }
+        }
+        catch (NoSuchFieldException e) {
+          throw Throwables.propagate(e);
+        }
+
+        messages.add(StringUtils.format("%s - %s", path.toString(), violation.getMessage()));
+      }
+
+      throw new ProvisionException(
+          Iterables.transform(
+              messages,
+              new Function<String, Message>()
+              {
+                @Override
+                public Message apply(String input)
+                {
+                  return new Message(StringUtils.format("%s%s", propertyBase, input));
+                }
+              }
+          )
+      );
+    }
+
+    log.info("Loaded class[%s] from props[%s] as [%s]", clazz, propertyBase, config);
+
+    return config;
+  }
+
+  private static void hieraricalPutValue(
+      String propertyPrefix,
+      String originalProperty,
+      String property,
+      Object value,
+      Map<String, Object> targetMap
+  )
+  {
+    int dotIndex = property.indexOf('.');
+    // Always put the property under its full name, even if it is of the form a.b. This makes sure the property is
+    // available for classes whose JsonProperty names are of the form a.b.
+    // Note: this will cause more properties than required to be present in the jsonMap.
+    targetMap.put(property, value);
+    if (dotIndex < 0) {
+      return;
+    }
+    if (dotIndex == 0) {
+      throw new ProvisionException(StringUtils.format("Double dot in property: %s", originalProperty));
+    }
+    if (dotIndex == property.length() - 1) {
+      throw new ProvisionException(StringUtils.format("Dot at the end of property: %s", originalProperty));
+    }
+    String nestedKey = property.substring(0, dotIndex);
+    Object nested = targetMap.computeIfAbsent(nestedKey, k -> new HashMap<String, Object>());
+    if (!(nested instanceof Map)) {
+      // A clash is possible between properties that are used to configure different objects: e.g.
+      // druid.emitter=parametrized is used to configure the Emitter class, and druid.emitter.parametrized.xxx=yyy is
+      // used to configure the ParametrizedUriEmitterConfig object. So skipping the xxx=yyy key-value pair when
+      // configuring the Emitter doesn't make any difference. That is why we just log this situation instead of
+      // throwing an exception.
+      log.info(
+          "Skipping %s property: one of its prefixes is also used as a property key. Prefix: %s",
+          originalProperty,
+          propertyPrefix
+      );
+      return;
+    }
+    Map<String, Object> nestedMap = (Map<String, Object>) nested;
+    hieraricalPutValue(propertyPrefix, originalProperty, property.substring(dotIndex + 1), value, nestedMap);
+  }
+
+  @VisibleForTesting
+  public static <T> void verifyClazzIsConfigurable(ObjectMapper mapper, Class<T> clazz)
+  {
+    final List<BeanPropertyDefinition> beanDefs = mapper.getSerializationConfig()
+                                                        .introspect(mapper.constructType(clazz))
+                                                        .findProperties();
+    for (BeanPropertyDefinition beanDef : beanDefs) {
+      final AnnotatedField field = beanDef.getField();
+      if (field == null || !field.hasAnnotation(JsonProperty.class)) {
+        throw new ProvisionException(
+            StringUtils.format(
+                "JsonConfigurator requires Jackson-annotated Config objects to have field annotations. %s doesn't",
+                clazz
+            )
+        );
+      }
+    }
+  }
+}
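configurate() can also be exercised directly; a minimal sketch, again assuming
the hypothetical DruidServerConfig shape from the JsonConfigProvider javadoc.

    import com.fasterxml.jackson.databind.ObjectMapper;
    import javax.validation.Validation;
    import javax.validation.Validator;
    import java.util.Properties;

    ObjectMapper mapper = new ObjectMapper();
    Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
    JsonConfigurator configurator = new JsonConfigurator(mapper, validator);

    Properties props = new Properties();
    props.setProperty("druid.server.hostname", "0.0.0.0");
    props.setProperty("druid.server.port", "3333");

    // Strips the "druid.server." prefix, builds a nested map, validates, and converts.
    DruidServerConfig config = configurator.configurate(props, "druid.server", DruidServerConfig.class);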
diff --git a/api/src/main/java/org/apache/druid/guice/KeyHolder.java b/api/src/main/java/org/apache/druid/guice/KeyHolder.java
new file mode 100644
index 00000000000..51e49e21153
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/KeyHolder.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.Key;
+
+/**
+ */
+public class KeyHolder<T>
+{
+  private final Key<? extends T> key;
+
+  public KeyHolder(
+      Key<? extends T> key
+  )
+  {
+    this.key = key;
+  }
+
+  public Key<? extends T> getKey()
+  {
+    return key;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/guice/LazySingleton.java b/api/src/main/java/org/apache/druid/guice/LazySingleton.java
new file mode 100644
index 00000000000..3213b130370
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/LazySingleton.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.ScopeAnnotation;
+import org.apache.druid.guice.annotations.PublicApi;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ */
+@Target({ElementType.TYPE, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@ScopeAnnotation
+@PublicApi
+public @interface LazySingleton
+{
+}
diff --git a/api/src/main/java/org/apache/druid/guice/LifecycleModule.java b/api/src/main/java/org/apache/druid/guice/LifecycleModule.java
new file mode 100644
index 00000000000..bbf9be416ff
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/LifecycleModule.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.Binder;
+import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.Module;
+import com.google.inject.Provides;
+import com.google.inject.TypeLiteral;
+import com.google.inject.multibindings.Multibinder;
+import com.google.inject.name.Names;
+import org.apache.druid.java.util.common.lifecycle.Lifecycle;
+
+import java.lang.annotation.Annotation;
+import java.util.Set;
+
+/**
+ * A Module to add lifecycle management to the injector.  {@link DruidGuiceExtensions} must also be included.
+ */
+public class LifecycleModule implements Module
+{
+  private final LifecycleScope scope = new LifecycleScope(Lifecycle.Stage.NORMAL);
+  private final LifecycleScope lastScope = new LifecycleScope(Lifecycle.Stage.LAST);
+
+  /**
+   * Registers a class to instantiate eagerly.  Classes mentioned here will be pulled out of
+   * the injector with an injector.getInstance() call when the lifecycle is created.
+   *
+   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
+   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
+   * side-effect even if nothing actually directly depends on the instance.
+   *
+   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point from the injector, not to
+   * auto-register things with the {@link Lifecycle}.  It is also possible to just bind things eagerly with Guice;
+   * it is not clear which is actually the best approach.  This is more explicit, but eager bindings inside of modules
+   * are less error-prone.
+   *
+   * @param binder the binder to register with
+   * @param clazz  the class to instantiate
+   */
+  public static void register(Binder binder, Class<?> clazz)
+  {
+    registerKey(binder, Key.get(clazz));
+  }
+
+  /**
+   * Registers a class/annotation combination to instantiate eagerly.  Classes mentioned here will be pulled out of
+   * the injector with an injector.getInstance() call when the lifecycle is created.
+   *
+   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
+   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
+   * side-effect even if nothing actually directly depends on the instance.
+   *
+   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point from the injector, not to
+   * auto-register things with the {@link Lifecycle}.  It is also possible to just bind things eagerly with Guice;
+   * it is not clear which is actually the best approach.  This is more explicit, but eager bindings inside of modules
+   * are less error-prone.
+   *
+   * @param binder     the binder to register with
+   * @param clazz      the class to instantiate
+   * @param annotation the annotation class to register with Guice
+   */
+  public static void register(Binder binder, Class<?> clazz, Class<? extends Annotation> annotation)
+  {
+    registerKey(binder, Key.get(clazz, annotation));
+  }
+
+  /**
+   * Registers a key to instantiate eagerly.  {@link Key}s mentioned here will be pulled out of
+   * the injector with an injector.getInstance() call when the lifecycle is created.
+   *
+   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
+   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
+   * side-effect even if nothing actually directly depends on the instance.
+   *
+   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point
+   * from the injector, not to auto-register things with the {@link Lifecycle}.  It is
+   * also possible to just bind things eagerly with Guice; it is not clear which is actually the best approach.
+   * This is more explicit, but eager bindings inside of modules are less error-prone.
+   *
+   * @param binder the binder to register with
+   * @param key    the key to pull eagerly from the injector when the lifecycle is created
+   */
+  public static void registerKey(Binder binder, Key<?> key)
+  {
+    getEagerBinder(binder).addBinding().toInstance(new KeyHolder<Object>(key));
+  }
+
+  private static Multibinder<KeyHolder> getEagerBinder(Binder binder)
+  {
+    return Multibinder.newSetBinder(binder, KeyHolder.class, Names.named("lifecycle"));
+  }
+
+  @Override
+  public void configure(Binder binder)
+  {
+    getEagerBinder(binder); // Load up the eager binder so that it will inject the empty set at a minimum.
+
+    binder.bindScope(ManageLifecycle.class, scope);
+    binder.bindScope(ManageLifecycleLast.class, lastScope);
+  }
+
+  @Provides @LazySingleton
+  public Lifecycle getLifecycle(final Injector injector)
+  {
+    final Key<Set<KeyHolder>> keyHolderKey = Key.get(new TypeLiteral<Set<KeyHolder>>(){}, Names.named("lifecycle"));
+    final Set<KeyHolder> eagerClasses = injector.getInstance(keyHolderKey);
+
+    Lifecycle lifecycle = new Lifecycle()
+    {
+      @Override
+      public void start() throws Exception
+      {
+        for (KeyHolder<?> holder : eagerClasses) {
+          injector.getInstance(holder.getKey()); // Pull the key so as to "eagerly" load up the class.
+        }
+        super.start();
+      }
+    };
+    scope.setLifecycle(lifecycle);
+    lastScope.setLifecycle(lifecycle);
+
+    return lifecycle;
+  }
+}
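A minimal wiring sketch (MyServerMonitor is hypothetical). DruidGuiceExtensions
supplies the LazySingleton scope that getLifecycle() above relies on.

    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import org.apache.druid.java.util.common.lifecycle.Lifecycle;

    public static void main(String[] args) throws Exception
    {
      Injector injector = Guice.createInjector(
          new DruidGuiceExtensions(),
          new LifecycleModule(),
          binder -> {
            binder.bind(MyServerMonitor.class).in(ManageLifecycle.class);
            LifecycleModule.register(binder, MyServerMonitor.class);  // eager-load it
          }
      );

      Lifecycle lifecycle = injector.getInstance(Lifecycle.class);
      lifecycle.start();  // pulls eager keys, then starts managed instances
    }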
diff --git a/api/src/main/java/org/apache/druid/guice/LifecycleScope.java b/api/src/main/java/org/apache/druid/guice/LifecycleScope.java
new file mode 100644
index 00000000000..5c3bedf74d3
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/LifecycleScope.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.common.collect.Lists;
+import com.google.inject.Key;
+import com.google.inject.Provider;
+import com.google.inject.Scope;
+import org.apache.druid.java.util.common.lifecycle.Lifecycle;
+import org.apache.druid.java.util.common.logger.Logger;
+
+import java.util.List;
+
+/**
+ * A scope that adds objects to the Lifecycle.  This is by definition also a lazy singleton scope.
+ */
+public class LifecycleScope implements Scope
+{
+  private static final Logger log = new Logger(LifecycleScope.class);
+  private final Lifecycle.Stage stage;
+
+  private Lifecycle lifecycle;
+  private final List<Object> instances = Lists.newLinkedList();
+
+  public LifecycleScope(Lifecycle.Stage stage)
+  {
+    this.stage = stage;
+  }
+
+  public void setLifecycle(Lifecycle lifecycle)
+  {
+    synchronized (instances) {
+      this.lifecycle = lifecycle;
+      for (Object instance : instances) {
+        lifecycle.addManagedInstance(instance, stage);
+      }
+    }
+  }
+
+  @Override
+  public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped)
+  {
+    return new Provider<T>()
+    {
+      private volatile T value = null;
+
+      @Override
+      public synchronized T get()
+      {
+        if (value == null) {
+          final T retVal = unscoped.get();
+
+          synchronized (instances) {
+            if (lifecycle == null) {
+              instances.add(retVal);
+            } else {
+              try {
+                lifecycle.addMaybeStartManagedInstance(retVal, stage);
+              }
+              catch (Exception e) {
+                log.warn(e, "Caught exception when trying to create a[%s]", key);
+                return null;
+              }
+            }
+          }
+
+          value = retVal;
+        }
+
+        return value;
+      }
+    };
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/guice/ManageLifecycle.java b/api/src/main/java/org/apache/druid/guice/ManageLifecycle.java
new file mode 100644
index 00000000000..0e6790d37d7
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/ManageLifecycle.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.ScopeAnnotation;
+import org.apache.druid.guice.annotations.PublicApi;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Marks the object to be managed by {@link org.apache.druid.java.util.common.lifecycle.Lifecycle}
+ *
+ * This Scope gets defined by {@link LifecycleModule}
+ */
+@Target({ ElementType.TYPE, ElementType.METHOD })
+@Retention(RetentionPolicy.RUNTIME)
+@ScopeAnnotation
+@PublicApi
+public @interface ManageLifecycle
+{
+}
diff --git a/api/src/main/java/org/apache/druid/guice/ManageLifecycleLast.java b/api/src/main/java/org/apache/druid/guice/ManageLifecycleLast.java
new file mode 100644
index 00000000000..e893719f7d5
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/ManageLifecycleLast.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.inject.ScopeAnnotation;
+import org.apache.druid.guice.annotations.PublicApi;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Marks the object to be managed by {@link org.apache.druid.java.util.common.lifecycle.Lifecycle} and set to be on Stage.LAST
+ *
+ * This Scope gets defined by {@link LifecycleModule}
+ */
+@Target({ ElementType.TYPE, ElementType.METHOD })
+@Retention(RetentionPolicy.RUNTIME)
+@ScopeAnnotation
+@PublicApi
+public @interface ManageLifecycleLast
+{
+}
diff --git a/api/src/main/java/org/apache/druid/guice/PolyBind.java b/api/src/main/java/org/apache/druid/guice/PolyBind.java
new file mode 100644
index 00000000000..f5c1a9d32f7
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/PolyBind.java
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.common.base.Preconditions;
+import com.google.inject.Binder;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.Provider;
+import com.google.inject.ProvisionException;
+import com.google.inject.TypeLiteral;
+import com.google.inject.binder.ScopedBindingBuilder;
+import com.google.inject.multibindings.MapBinder;
+import com.google.inject.util.Types;
+import org.apache.druid.guice.annotations.PublicApi;
+import org.apache.druid.java.util.common.StringUtils;
+
+import javax.annotation.Nullable;
+import java.lang.reflect.ParameterizedType;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Provides the ability to create "polymorphic" bindings, where the polymorphism is actually just a decision made at
+ * injection time based on a value in a Properties object.
+ *
+ * The workflow is that you first create a choice by calling createChoice().  Then you create options using the binder
+ * returned by the optionBinder() method.  Multiple different modules can call optionBinder and all options will be
+ * reflected at injection time as long as equivalent interface Key objects are passed into the various methods.
+ */
+@PublicApi
+public class PolyBind
+{
+  /**
+   * Sets up a "choice" for the injector to resolve at injection time.
+   *
+   * @param binder the binder for the injector that is being configured
+   * @param property the property that will be checked to determine the implementation choice
+   * @param interfaceKey the interface that will be injected using this choice
+   * @param defaultKey the default instance to be injected if the property doesn't match a choice.  Can be null
+   * @param <T> interface type
+   * @return A ScopedBindingBuilder so that scopes can be added to the binding, if required.
+   */
+  public static <T> ScopedBindingBuilder createChoice(
+      Binder binder,
+      String property,
+      Key<T> interfaceKey,
+      @Nullable Key<? extends T> defaultKey
+  )
+  {
+    ConfiggedProvider<T> provider = new ConfiggedProvider<>(interfaceKey, property, defaultKey, null);
+    return binder.bind(interfaceKey).toProvider(provider);
+  }
+
+  /**
+   * @deprecated use {@link #createChoiceWithDefault(Binder, String, Key, String)}
+   * instead. {@code defaultKey} argument is ignored.
+   */
+  @Deprecated
+  public static <T> ScopedBindingBuilder createChoiceWithDefault(
+      Binder binder,
+      String property,
+      Key<T> interfaceKey,
+      Key<? extends T> defaultKey,
+      String defaultPropertyValue
+  )
+  {
+    return createChoiceWithDefault(binder, property, interfaceKey, defaultPropertyValue);
+  }
+
+  /**
+   * Sets up a "choice" for the injector to resolve at injection time.
+   *
+   * @param binder the binder for the injector that is being configured
+   * @param property the property that will be checked to determine the implementation choice
+   * @param interfaceKey the interface that will be injected using this choice
+   * @param defaultPropertyValue the default property value to use if the property is not set.
+   * @param <T> interface type
+   * @return A ScopedBindingBuilder so that scopes can be added to the binding, if required.
+   */
+  public static <T> ScopedBindingBuilder createChoiceWithDefault(
+      Binder binder,
+      String property,
+      Key<T> interfaceKey,
+      String defaultPropertyValue
+  )
+  {
+    Preconditions.checkNotNull(defaultPropertyValue);
+    ConfiggedProvider<T> provider = new ConfiggedProvider<>(interfaceKey, property, null, defaultPropertyValue);
+    return binder.bind(interfaceKey).toProvider(provider);
+  }
+
+  /**
+   * Binds an option for a specific choice.  The choice must already be registered on the injector for this to work.
+   *
+   * @param binder the binder for the injector that is being configured
+   * @param interfaceKey the interface that will have an option added to it.  This must equal the
+   *                     Key provided to createChoice
+   * @param <T> interface type
+   * @return A MapBinder that can be used to create the actual option bindings.
+   */
+  public static <T> MapBinder<String, T> optionBinder(Binder binder, Key<T> interfaceKey)
+  {
+    final TypeLiteral<T> interfaceType = interfaceKey.getTypeLiteral();
+
+    if (interfaceKey.getAnnotation() != null) {
+      return MapBinder.newMapBinder(
+          binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotation()
+      );
+    } else if (interfaceKey.getAnnotationType() != null) {
+      return MapBinder.newMapBinder(
+          binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotationType()
+      );
+    } else {
+      return MapBinder.newMapBinder(binder, TypeLiteral.get(String.class), interfaceType);
+    }
+  }
+
+  static class ConfiggedProvider<T> implements Provider<T>
+  {
+    private final Key<T> key;
+    private final String property;
+    @Nullable
+    private final Key<? extends T> defaultKey;
+    @Nullable
+    private final String defaultPropertyValue;
+
+    private Injector injector;
+    private Properties props;
+
+    ConfiggedProvider(
+        Key<T> key,
+        String property,
+        @Nullable Key<? extends T> defaultKey,
+        @Nullable String defaultPropertyValue
+    )
+    {
+      this.key = key;
+      this.property = property;
+      this.defaultKey = defaultKey;
+      this.defaultPropertyValue = defaultPropertyValue;
+    }
+
+    @Inject
+    void configure(Injector injector, Properties props)
+    {
+      this.injector = injector;
+      this.props = props;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public T get()
+    {
+      final ParameterizedType mapType = Types.mapOf(
+          String.class, Types.newParameterizedType(Provider.class, key.getTypeLiteral().getType())
+      );
+
+      final Map<String, Provider<T>> implsMap;
+      if (key.getAnnotation() != null) {
+        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType, key.getAnnotation()));
+      } else if (key.getAnnotationType() != null) {
+        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType, key.getAnnotationType()));
+      } else {
+        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType));
+      }
+
+      String implName = props.getProperty(property);
+      if (implName == null) {
+        if (defaultPropertyValue == null) {
+          if (defaultKey == null) {
+            throw new ProvisionException(StringUtils.format("Some value must be configured for [%s]", key));
+          }
+          return injector.getInstance(defaultKey);
+        }
+        implName = defaultPropertyValue;
+      }
+      final Provider<T> provider = implsMap.get(implName);
+
+      if (provider == null) {
+        throw new ProvisionException(
+            StringUtils.format("Unknown provider[%s] of %s, known options[%s]", implName, key, implsMap.keySet())
+        );
+      }
+
+      return provider.get();
+    }
+  }
+}
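
To make the createChoice()/optionBinder() workflow described in the class Javadoc concrete, here is a hedged
sketch; the interface, the two implementations, and the property name are illustrative only:

    import com.google.inject.Binder;
    import com.google.inject.Key;
    import com.google.inject.Module;
    import org.apache.druid.guice.PolyBind;

    interface MetadataStorage {}
    class DerbyStorage implements MetadataStorage {}
    class MySqlStorage implements MetadataStorage {}

    class StorageModule implements Module
    {
      @Override
      public void configure(Binder binder)
      {
        // 1) Create the choice once, keyed off a runtime property.
        PolyBind.createChoiceWithDefault(
            binder,
            "druid.metadata.storage.type",   // hypothetical property
            Key.get(MetadataStorage.class),
            "derby"                          // default option name
        );

        // 2) Register options. Other modules may add more options under the
        //    same Key, and all of them are visible at injection time.
        PolyBind.optionBinder(binder, Key.get(MetadataStorage.class))
                .addBinding("derby").to(DerbyStorage.class);
        PolyBind.optionBinder(binder, Key.get(MetadataStorage.class))
                .addBinding("mysql").to(MySqlStorage.class);
      }
    }

At injection time, ConfiggedProvider reads the property from the injector's Properties and picks the matching
option, falling back to "derby" here if the property is unset.
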
diff --git a/api/src/main/java/org/apache/druid/guice/SupplierProvider.java b/api/src/main/java/org/apache/druid/guice/SupplierProvider.java
new file mode 100644
index 00000000000..107f3d92f4c
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/SupplierProvider.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice;
+
+import com.google.common.base.Supplier;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.Provider;
+
+/**
+ */
+public class SupplierProvider<T> implements Provider<T>
+{
+  private final Key<Supplier<T>> supplierKey;
+
+  private Provider<Supplier<T>> supplierProvider;
+
+  public SupplierProvider(
+      Key<Supplier<T>> supplierKey
+  )
+  {
+    this.supplierKey = supplierKey;
+  }
+
+  @Inject
+  public void configure(Injector injector)
+  {
+    this.supplierProvider = injector.getProvider(supplierKey);
+  }
+
+  @Override
+  public T get()
+  {
+    return supplierProvider.get().get();
+  }
+}
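
A short sketch of the intended usage, under the assumption that some other binding supplies the Supplier
(ServiceConfig and the bindings below are hypothetical):

    import com.google.common.base.Supplier;
    import com.google.common.base.Suppliers;
    import com.google.inject.Binder;
    import com.google.inject.Key;
    import com.google.inject.Module;
    import com.google.inject.TypeLiteral;
    import org.apache.druid.guice.SupplierProvider;

    class ServiceConfig {}

    class ConfigModule implements Module
    {
      @Override
      public void configure(Binder binder)
      {
        Key<Supplier<ServiceConfig>> supplierKey =
            Key.get(new TypeLiteral<Supplier<ServiceConfig>>() {});

        // Something (e.g. a configuration framework) binds the Supplier...
        binder.bind(supplierKey).toInstance(Suppliers.ofInstance(new ServiceConfig()));

        // ...and SupplierProvider lets injection points ask for ServiceConfig
        // directly; each get() delegates to the current Supplier value.
        binder.bind(ServiceConfig.class).toProvider(new SupplierProvider<>(supplierKey));
      }
    }
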
diff --git a/api/src/main/java/org/apache/druid/guice/annotations/EscalatedGlobal.java b/api/src/main/java/org/apache/druid/guice/annotations/EscalatedGlobal.java
new file mode 100644
index 00000000000..f597e2f75f9
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/annotations/EscalatedGlobal.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice.annotations;
+
+import com.google.inject.BindingAnnotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ */
+@BindingAnnotation
+@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@PublicApi
+public @interface EscalatedGlobal
+{
+}
diff --git a/api/src/main/java/org/apache/druid/guice/annotations/Global.java b/api/src/main/java/org/apache/druid/guice/annotations/Global.java
new file mode 100644
index 00000000000..1ea90d621c8
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/annotations/Global.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice.annotations;
+
+import com.google.inject.BindingAnnotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ */
+@BindingAnnotation
+@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@PublicApi
+public @interface Global
+{
+}
diff --git a/api/src/main/java/org/apache/druid/guice/annotations/JSR311Resource.java b/api/src/main/java/org/apache/druid/guice/annotations/JSR311Resource.java
new file mode 100644
index 00000000000..e3cd7e245c6
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/annotations/JSR311Resource.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice.annotations;
+
+import com.google.inject.BindingAnnotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ */
+@BindingAnnotation
+@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@PublicApi
+public @interface JSR311Resource
+{
+}
diff --git a/api/src/main/java/org/apache/druid/guice/annotations/Json.java b/api/src/main/java/org/apache/druid/guice/annotations/Json.java
new file mode 100644
index 00000000000..775d856a37b
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/annotations/Json.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice.annotations;
+
+import com.google.inject.BindingAnnotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ */
+@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@BindingAnnotation
+@PublicApi
+public @interface Json
+{
+}
diff --git a/api/src/main/java/org/apache/druid/guice/annotations/Self.java b/api/src/main/java/org/apache/druid/guice/annotations/Self.java
new file mode 100644
index 00000000000..7fb9341907b
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/annotations/Self.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice.annotations;
+
+import com.google.inject.BindingAnnotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ */
+@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@BindingAnnotation
+@PublicApi
+public @interface Self
+{
+}
diff --git a/api/src/main/java/org/apache/druid/guice/annotations/Smile.java b/api/src/main/java/org/apache/druid/guice/annotations/Smile.java
new file mode 100644
index 00000000000..fb4ef0eeef2
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/guice/annotations/Smile.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.guice.annotations;
+
+import com.google.inject.BindingAnnotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ */
+@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@BindingAnnotation
+@PublicApi
+public @interface Smile
+{
+}
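
These binding annotations exist to disambiguate multiple bindings of the same type. A hypothetical consumer,
assuming the injector binds both a JSON-flavored and a Smile-flavored ObjectMapper under the annotations above:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.google.inject.Inject;
    import org.apache.druid.guice.annotations.Json;
    import org.apache.druid.guice.annotations.Smile;

    class PayloadWriter
    {
      private final ObjectMapper jsonMapper;
      private final ObjectMapper smileMapper;

      @Inject
      PayloadWriter(@Json ObjectMapper jsonMapper, @Smile ObjectMapper smileMapper)
      {
        // Without the annotations these two parameters would collide on the
        // same Key<ObjectMapper>; the annotations make each binding distinct.
        this.jsonMapper = jsonMapper;
        this.smileMapper = smileMapper;
      }
    }
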
diff --git a/api/src/main/java/org/apache/druid/indexer/IngestionState.java b/api/src/main/java/org/apache/druid/indexer/IngestionState.java
new file mode 100644
index 00000000000..26d46166674
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/IngestionState.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexer;
+
+public enum IngestionState
+{
+  NOT_STARTED,
+  DETERMINE_PARTITIONS,
+  BUILD_SEGMENTS,
+  COMPLETED
+}
diff --git a/api/src/main/java/org/apache/druid/indexer/RunnerTaskState.java b/api/src/main/java/org/apache/druid/indexer/RunnerTaskState.java
new file mode 100644
index 00000000000..221c49fe84b
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/RunnerTaskState.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.druid.indexer;
+/**
+ * Covers the states of a task in the task runner that are not captured by {@link TaskState}. This state is not
+ * stored in the database.
+ */
+public enum RunnerTaskState
+{
+  WAITING,
+  PENDING,
+  RUNNING,
+  NONE // is used for a completed task
+}
diff --git a/api/src/main/java/org/apache/druid/indexer/TaskInfo.java b/api/src/main/java/org/apache/druid/indexer/TaskInfo.java
new file mode 100644
index 00000000000..5be8099179a
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/TaskInfo.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.druid.indexer;
+
+import com.google.common.base.Preconditions;
+import org.joda.time.DateTime;
+
+import javax.annotation.Nullable;
+
+/**
+ * Stores task info returned by runner queries; instances are cached in OverlordResource.
+ */
+public class TaskInfo<EntryType, StatusType>
+{
+  private final String id;
+  private final DateTime createdTime;
+  private final StatusType status;
+  private final String dataSource;
+  @Nullable
+  private final EntryType task;
+
+  public TaskInfo(
+      String id,
+      DateTime createdTime,
+      StatusType status,
+      String dataSource,
+      @Nullable EntryType task
+  )
+  {
+    this.id = Preconditions.checkNotNull(id, "id");
+    this.createdTime = Preconditions.checkNotNull(createdTime, "createdTime");
+    this.status = Preconditions.checkNotNull(status, "status");
+    this.dataSource = Preconditions.checkNotNull(dataSource, "dataSource");
+    this.task = task;
+  }
+
+  public String getId()
+  {
+    return id;
+  }
+
+  public DateTime getCreatedTime()
+  {
+    return createdTime;
+  }
+
+  public StatusType getStatus()
+  {
+    return status;
+  }
+
+  public String getDataSource()
+  {
+    return dataSource;
+  }
+
+  @Nullable
+  public EntryType getTask()
+  {
+    return task;
+  }
+}
+
diff --git a/api/src/main/java/org/apache/druid/indexer/TaskLocation.java b/api/src/main/java/org/apache/druid/indexer/TaskLocation.java
new file mode 100644
index 00000000000..bf99378ac97
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/TaskLocation.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexer;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.Objects;
+
+public class TaskLocation
+{
+  private static final TaskLocation UNKNOWN = new TaskLocation(null, -1, -1);
+
+  private final String host;
+  private final int port;
+  private final int tlsPort;
+
+  public static TaskLocation create(String host, int port, int tlsPort)
+  {
+    return new TaskLocation(host, port, tlsPort);
+  }
+
+  public static TaskLocation unknown()
+  {
+    return TaskLocation.UNKNOWN;
+  }
+
+  @JsonCreator
+  public TaskLocation(
+      @JsonProperty("host") String host,
+      @JsonProperty("port") int port,
+      @JsonProperty("tlsPort") int tlsPort
+  )
+  {
+    this.host = host;
+    this.port = port;
+    this.tlsPort = tlsPort;
+  }
+
+  @JsonProperty
+  public String getHost()
+  {
+    return host;
+  }
+
+  @JsonProperty
+  public int getPort()
+  {
+    return port;
+  }
+
+  @JsonProperty
+  public int getTlsPort()
+  {
+    return tlsPort;
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    TaskLocation that = (TaskLocation) o;
+
+    return port == that.port && tlsPort == that.tlsPort &&
+           Objects.equals(host, that.host);
+  }
+
+  @Override
+  public int hashCode()
+  {
+    // host may be null (see UNKNOWN), so avoid calling hashCode() on it directly.
+    return Objects.hash(host, port, tlsPort);
+  }
+
+  @Override
+  public String toString()
+  {
+    return "TaskLocation{" +
+           "host='" + host + '\'' +
+           ", port=" + port +
+           ", tlsPort=" + tlsPort +
+           '}';
+  }
+}
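
Since the class is Jackson-annotated, it round-trips through JSON. A small sketch (the host name is made up):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.druid.indexer.TaskLocation;

    class TaskLocationJsonDemo
    {
      public static void main(String[] args) throws Exception
      {
        ObjectMapper mapper = new ObjectMapper();
        TaskLocation loc = TaskLocation.create("worker1.example.com", 8100, -1);
        String json = mapper.writeValueAsString(loc);
        // {"host":"worker1.example.com","port":8100,"tlsPort":-1}
        TaskLocation back = mapper.readValue(json, TaskLocation.class);
        System.out.println(loc.equals(back)); // true
      }
    }
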
diff --git a/api/src/main/java/org/apache/druid/indexer/TaskMetricsGetter.java b/api/src/main/java/org/apache/druid/indexer/TaskMetricsGetter.java
new file mode 100644
index 00000000000..682e9de2f67
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/TaskMetricsGetter.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexer;
+
+import java.util.List;
+import java.util.Map;
+
+public interface TaskMetricsGetter
+{
+  List<String> getKeys();
+  Map<String, Number> getTotalMetrics();
+}
diff --git a/api/src/main/java/org/apache/druid/indexer/TaskMetricsUtils.java b/api/src/main/java/org/apache/druid/indexer/TaskMetricsUtils.java
new file mode 100644
index 00000000000..67f63c7e620
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/TaskMetricsUtils.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexer;
+
+import com.google.common.collect.Maps;
+
+import java.util.Map;
+
+public class TaskMetricsUtils
+{
+  public static final String ROWS_PROCESSED = "rowsProcessed";
+  public static final String ROWS_PROCESSED_WITH_ERRORS = "rowsProcessedWithErrors";
+  public static final String ROWS_UNPARSEABLE = "rowsUnparseable";
+  public static final String ROWS_THROWN_AWAY = "rowsThrownAway";
+
+  public static Map<String, Object> makeIngestionRowMetrics(
+      long rowsProcessed,
+      long rowsProcessedWithErrors,
+      long rowsUnparseable,
+      long rowsThrownAway
+  )
+  {
+    Map<String, Object> metricsMap = Maps.newHashMap();
+    metricsMap.put(ROWS_PROCESSED, rowsProcessed);
+    metricsMap.put(ROWS_PROCESSED_WITH_ERRORS, rowsProcessedWithErrors);
+    metricsMap.put(ROWS_UNPARSEABLE, rowsUnparseable);
+    metricsMap.put(ROWS_THROWN_AWAY, rowsThrownAway);
+    return metricsMap;
+  }
+}
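
TaskMetricsGetter and TaskMetricsUtils are designed to be used together: a getter reports totals keyed by the
constants above. A hypothetical implementation:

    import com.google.common.collect.ImmutableList;
    import org.apache.druid.indexer.TaskMetricsGetter;
    import org.apache.druid.indexer.TaskMetricsUtils;

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class SimpleRowStats implements TaskMetricsGetter
    {
      private long processed;
      private long processedWithErrors;
      private long unparseable;
      private long thrownAway;

      @Override
      public List<String> getKeys()
      {
        return ImmutableList.of(
            TaskMetricsUtils.ROWS_PROCESSED,
            TaskMetricsUtils.ROWS_PROCESSED_WITH_ERRORS,
            TaskMetricsUtils.ROWS_UNPARSEABLE,
            TaskMetricsUtils.ROWS_THROWN_AWAY
        );
      }

      @Override
      public Map<String, Number> getTotalMetrics()
      {
        // Longs autobox to Number; the map mirrors makeIngestionRowMetrics().
        Map<String, Number> metrics = new HashMap<>();
        metrics.put(TaskMetricsUtils.ROWS_PROCESSED, processed);
        metrics.put(TaskMetricsUtils.ROWS_PROCESSED_WITH_ERRORS, processedWithErrors);
        metrics.put(TaskMetricsUtils.ROWS_UNPARSEABLE, unparseable);
        metrics.put(TaskMetricsUtils.ROWS_THROWN_AWAY, thrownAway);
        return metrics;
      }
    }
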
diff --git a/api/src/main/java/org/apache/druid/indexer/TaskState.java b/api/src/main/java/org/apache/druid/indexer/TaskState.java
new file mode 100644
index 00000000000..9048e878ec1
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/TaskState.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexer;
+
+public enum TaskState
+{
+  RUNNING,
+  SUCCESS,
+  FAILED;
+
+  public boolean isRunnable()
+  {
+    return this == RUNNING;
+  }
+
+  public boolean isComplete()
+  {
+    return this != RUNNING;
+  }
+
+  public boolean isSuccess()
+  {
+    return this == SUCCESS;
+  }
+
+  public boolean isFailure()
+  {
+    return this == FAILED;
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/indexer/TaskStatus.java b/api/src/main/java/org/apache/druid/indexer/TaskStatus.java
new file mode 100644
index 00000000000..6b90fef0617
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/TaskStatus.java
@@ -0,0 +1,210 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexer;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+
+/**
+ * Represents the status of a task from the perspective of the coordinator. The task may be ongoing
+ * ({@link #isComplete()} false) or it may be complete ({@link #isComplete()} true).
+ *
+ * TaskStatus objects are immutable.
+ */
+public class TaskStatus
+{
+  public static final int MAX_ERROR_MSG_LENGTH = 100;
+
+  public static TaskStatus running(String taskId)
+  {
+    return new TaskStatus(taskId, TaskState.RUNNING, -1, null);
+  }
+
+  public static TaskStatus success(String taskId)
+  {
+    return new TaskStatus(taskId, TaskState.SUCCESS, -1, null);
+  }
+
+  public static TaskStatus success(String taskId, String errorMsg)
+  {
+    return new TaskStatus(taskId, TaskState.SUCCESS, -1, errorMsg);
+  }
+
+  public static TaskStatus failure(String taskId)
+  {
+    return new TaskStatus(taskId, TaskState.FAILED, -1, null);
+  }
+
+  public static TaskStatus failure(String taskId, String errorMsg)
+  {
+    return new TaskStatus(taskId, TaskState.FAILED, -1, errorMsg);
+  }
+
+  public static TaskStatus fromCode(String taskId, TaskState code)
+  {
+    return new TaskStatus(taskId, code, -1, null);
+  }
+
+  // The error message can be large, so truncate it to avoid storing large objects in zookeeper/metadata storage.
+  // The full error message will be available via a TaskReport.
+  private static String truncateErrorMsg(String errorMsg)
+  {
+    if (errorMsg != null && errorMsg.length() > MAX_ERROR_MSG_LENGTH) {
+      return errorMsg.substring(0, MAX_ERROR_MSG_LENGTH) + "...";
+    } else {
+      return errorMsg;
+    }
+  }
+
+  private final String id;
+  private final TaskState status;
+  private final long duration;
+  private final String errorMsg;
+
+  @JsonCreator
+  protected TaskStatus(
+      @JsonProperty("id") String id,
+      @JsonProperty("status") TaskState status,
+      @JsonProperty("duration") long duration,
+      @JsonProperty("errorMsg") String errorMsg
+  )
+  {
+    this.id = id;
+    this.status = status;
+    this.duration = duration;
+    this.errorMsg = truncateErrorMsg(errorMsg);
+
+    // Check class invariants.
+    Preconditions.checkNotNull(id, "id");
+    Preconditions.checkNotNull(status, "status");
+  }
+
+  @JsonProperty("id")
+  public String getId()
+  {
+    return id;
+  }
+
+  @JsonProperty("status")
+  public TaskState getStatusCode()
+  {
+    return status;
+  }
+
+  @JsonProperty("duration")
+  public long getDuration()
+  {
+    return duration;
+  }
+
+  @JsonProperty("errorMsg")
+  public String getErrorMsg()
+  {
+    return errorMsg;
+  }
+
+  /**
+   * Signals that a task is not yet complete, and is still runnable on a worker. Exactly one of isRunnable,
+   * isSuccess, or isFailure will be true at any one time.
+   *
+   * @return whether the task is runnable.
+   */
+  @JsonIgnore
+  public boolean isRunnable()
+  {
+    return status == TaskState.RUNNING;
+  }
+
+  /**
+   * Inverse of {@link #isRunnable}.
+   *
+   * @return whether the task is complete.
+   */
+  @JsonIgnore
+  public boolean isComplete()
+  {
+    return !isRunnable();
+  }
+
+  /**
+   * Signals that the task completed successfully. Exactly one of isRunnable, isSuccess, or isFailure will
+   * be true at any one time.
+   *
+   * @return whether the task succeeded.
+   */
+  @JsonIgnore
+  public boolean isSuccess()
+  {
+    return status == TaskState.SUCCESS;
+  }
+
+  /**
+   * Signals that the task completed unsuccessfully. Exactly one of isRunnable, isSuccess, or
+   * isFailure will be true at any one time.
+   *
+   * @return whether the task failed
+   */
+  @JsonIgnore
+  public boolean isFailure()
+  {
+    return status == TaskState.FAILED;
+  }
+
+  public TaskStatus withDuration(long duration)
+  {
+    return new TaskStatus(id, status, duration, errorMsg);
+  }
+
+  @Override
+  public String toString()
+  {
+    return Objects.toStringHelper(this)
+                  .add("id", id)
+                  .add("status", status)
+                  .add("duration", duration)
+                  .add("errorMsg", errorMsg)
+                  .toString();
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    TaskStatus that = (TaskStatus) o;
+    return getDuration() == that.getDuration() &&
+           java.util.Objects.equals(getId(), that.getId()) &&
+           status == that.status &&
+           java.util.Objects.equals(getErrorMsg(), that.getErrorMsg());
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return java.util.Objects.hash(getId(), status, getDuration(), getErrorMsg());
+  }
+}
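
A small sketch of the error-message truncation behavior implemented above (the task id is made up):

    import org.apache.druid.indexer.TaskStatus;

    class TaskStatusDemo
    {
      public static void main(String[] args)
      {
        StringBuilder longMsg = new StringBuilder();
        for (int i = 0; i < 500; i++) {
          longMsg.append('x');
        }
        TaskStatus status = TaskStatus.failure("task-1", longMsg.toString());
        // Stored message is capped at MAX_ERROR_MSG_LENGTH (100) plus "...".
        System.out.println(status.getErrorMsg().length()); // 103
        System.out.println(status.isFailure());            // true
      }
    }
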
diff --git a/api/src/main/java/org/apache/druid/indexer/TaskStatusPlus.java b/api/src/main/java/org/apache/druid/indexer/TaskStatusPlus.java
new file mode 100644
index 00000000000..82c76b6867a
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/indexer/TaskStatusPlus.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexer;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
+import org.joda.time.DateTime;
+
+import javax.annotation.Nullable;
+import java.util.Objects;
+
+public class TaskStatusPlus
+{
+  private final String id;
+  private final String type;
+  private final DateTime createdTime;
+  private final DateTime queueInsertionTime;
+  private final TaskState state;
+  private final RunnerTaskState runnerTaskState;
+  private final Long duration;
+  private final TaskLocation location;
+  private final String dataSource;
+
+  @Nullable
+  private final String errorMsg;
+
+  @JsonCreator
+  public TaskStatusPlus(
+      @JsonProperty("id") String id,
+      @JsonProperty("type") @Nullable String type, // nullable for backward compatibility
+      @JsonProperty("createdTime") DateTime createdTime,
+      @JsonProperty("queueInsertionTime") DateTime queueInsertionTime,
+      @JsonProperty("statusCode") @Nullable TaskState state,
+      @JsonProperty("runnerStatusCode") @Nullable RunnerTaskState runnerTaskState,
+      @JsonProperty("duration") @Nullable Long duration,
+      @JsonProperty("location") TaskLocation location,
+      @JsonProperty("dataSource") @Nullable String dataSource, // nullable for backward compatibility
+      @JsonProperty("errorMsg") @Nullable String errorMsg
+  )
+  {
+    if (state != null && state.isComplete()) {
+      Preconditions.checkNotNull(duration, "duration");
+    }
+    this.id = Preconditions.checkNotNull(id, "id");
+    this.type = type;
+    this.createdTime = Preconditions.checkNotNull(createdTime, "createdTime");
+    this.queueInsertionTime = Preconditions.checkNotNull(queueInsertionTime, "queueInsertionTime");
+    this.state = state;
+    this.runnerTaskState = runnerTaskState;
+    this.duration = duration;
+    this.location = Preconditions.checkNotNull(location, "location");
+    this.dataSource = dataSource;
+    this.errorMsg = errorMsg;
+  }
+
+  @JsonProperty
+  public String getId()
+  {
+    return id;
+  }
+
+  @Nullable
+  @JsonProperty
+  public String getType()
+  {
+    return type;
+  }
+
+  @JsonProperty
+  public DateTime getCreatedTime()
+  {
+    return createdTime;
+  }
+
+  @JsonProperty
+  public DateTime getQueueInsertionTime()
+  {
+    return queueInsertionTime;
+  }
+
+  @Nullable
+  @JsonProperty("statusCode")
+  public TaskState getState()
+  {
+    return state;
+  }
+
+  @Nullable
+  @JsonProperty("runnerStatusCode")
+  public RunnerTaskState getRunnerTaskState()
+  {
+    return runnerTaskState;
+  }
+
+  @Nullable
+  @JsonProperty
+  public Long getDuration()
+  {
+    return duration;
+  }
+
+  @JsonProperty
+  public TaskLocation getLocation()
+  {
+    return location;
+  }
+
+  @Nullable
+  @JsonProperty
+  public String getDataSource()
+  {
+    return dataSource;
+  }
+
+  @Nullable
+  @JsonProperty("errorMsg")
+  public String getErrorMsg()
+  {
+    return errorMsg;
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    TaskStatusPlus that = (TaskStatusPlus) o;
+    return Objects.equals(getId(), that.getId()) &&
+           Objects.equals(getType(), that.getType()) &&
+           Objects.equals(getCreatedTime(), that.getCreatedTime()) &&
+           Objects.equals(getQueueInsertionTime(), that.getQueueInsertionTime()) &&
+           getState() == that.getState() &&
+           getRunnerTaskState() == that.getRunnerTaskState() &&
+           Objects.equals(getDuration(), that.getDuration()) &&
+           Objects.equals(getLocation(), that.getLocation()) &&
+           Objects.equals(getDataSource(), that.getDataSource()) &&
+           Objects.equals(getErrorMsg(), that.getErrorMsg());
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return Objects.hash(
+        getId(),
+        getType(),
+        getCreatedTime(),
+        getQueueInsertionTime(),
+        getState(),
+        getRunnerTaskState(),
+        getDuration(),
+        getLocation(),
+        getDataSource(),
+        getErrorMsg()
+    );
+  }
+
+  @Override
+  public String toString()
+  {
+    return "TaskStatusPlus{" +
+           "id='" + id + '\'' +
+           ", type='" + type + '\'' +
+           ", createdTime=" + createdTime +
+           ", queueInsertionTime=" + queueInsertionTime +
+           ", state=" + state +
+           ", duration=" + duration +
+           ", location=" + location +
+           ", dataSource='" + dataSource + '\'' +
+           ", errorMsg='" + errorMsg + '\'' +
+           '}';
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/initialization/DruidModule.java b/api/src/main/java/org/apache/druid/initialization/DruidModule.java
new file mode 100644
index 00000000000..ba298c9be6f
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/initialization/DruidModule.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.initialization;
+
+import com.fasterxml.jackson.databind.Module;
+import org.apache.druid.guice.annotations.ExtensionPoint;
+
+import java.util.List;
+
+/**
+ */
+@ExtensionPoint
+public interface DruidModule extends com.google.inject.Module
+{
+  List<? extends Module> getJacksonModules();
+}
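
A minimal hypothetical extension module implementing this interface; it supplies both Guice bindings and the
Jackson modules its types need:

    import com.fasterxml.jackson.databind.Module;
    import com.fasterxml.jackson.databind.module.SimpleModule;
    import com.google.inject.Binder;
    import org.apache.druid.initialization.DruidModule;

    import java.util.Collections;
    import java.util.List;

    public class MyExtensionModule implements DruidModule
    {
      @Override
      public List<? extends Module> getJacksonModules()
      {
        // Register custom serializers/subtypes for the extension's types here.
        return Collections.singletonList(new SimpleModule("MyExtensionModule"));
      }

      @Override
      public void configure(Binder binder)
      {
        // Guice bindings for the extension go here.
      }
    }
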
diff --git a/api/src/main/java/org/apache/druid/jackson/CommaListJoinDeserializer.java b/api/src/main/java/org/apache/druid/jackson/CommaListJoinDeserializer.java
new file mode 100644
index 00000000000..bda1f36fe43
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/jackson/CommaListJoinDeserializer.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.jackson;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.deser.std.StdScalarDeserializer;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ */
+public class CommaListJoinDeserializer extends StdScalarDeserializer<List<String>>
+{
+  protected CommaListJoinDeserializer()
+  {
+    super(List.class);
+  }
+
+  @Override
+  public List<String> deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
+      throws IOException, JsonProcessingException
+  {
+    return Arrays.asList(jsonParser.getText().split(","));
+  }
+}
diff --git a/api/src/main/java/org/apache/druid/jackson/CommaListJoinSerializer.java b/api/src/main/java/org/apache/druid/jackson/CommaListJoinSerializer.java
new file mode 100644
index 00000000000..8d62da41d37
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/jackson/CommaListJoinSerializer.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.jackson;
+
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.std.StdScalarSerializer;
+import com.google.common.base.Joiner;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ */
+public class CommaListJoinSerializer extends StdScalarSerializer<List<String>>
+{
+  private static final Joiner JOINER = Joiner.on(",");
+
+  protected CommaListJoinSerializer()
+  {
+    super(List.class, true);
+  }
+
+  @Override
+  public void serialize(List<String> value, JsonGenerator jgen, SerializerProvider provider)
+      throws IOException, JsonGenerationException
+  {
+    jgen.writeString(JOINER.join(value));
+  }
+}
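
The serializer/deserializer pair above is meant to be attached to a List<String> property through Jackson
annotations, storing the list as a single comma-joined JSON string rather than a JSON array. A hypothetical bean:

    import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
    import com.fasterxml.jackson.databind.annotation.JsonSerialize;
    import org.apache.druid.jackson.CommaListJoinDeserializer;
    import org.apache.druid.jackson.CommaListJoinSerializer;

    import java.util.List;

    class SegmentDimensions
    {
      // ["a", "b", "c"] is written as "a,b,c" and read back the same way.
      @JsonSerialize(using = CommaListJoinSerializer.class)
      @JsonDeserialize(using = CommaListJoinDeserializer.class)
      public List<String> dimensions;
    }

Note that list elements containing commas will not round-trip, since deserialization splits on every comma.
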
diff --git a/api/src/main/java/org/apache/druid/js/JavaScriptConfig.java b/api/src/main/java/org/apache/druid/js/JavaScriptConfig.java
new file mode 100644
index 00000000000..10447102470
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/js/JavaScriptConfig.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.js;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.druid.guice.annotations.PublicApi;
+
+/**
+ * Should be used by extension filters, aggregators, etc. that use JavaScript, so they can determine whether
+ * JavaScript is enabled.
+ */
+@PublicApi
+public class JavaScriptConfig
+{
+  public static final int DEFAULT_OPTIMIZATION_LEVEL = 9;
+
+  private static final JavaScriptConfig ENABLED_INSTANCE = new JavaScriptConfig(true);
+
+  @JsonProperty
+  private final boolean enabled;
+
+  @JsonCreator
+  public JavaScriptConfig(
+      @JsonProperty("enabled") boolean enabled
+  )
+  {
+    this.enabled = enabled;
+  }
+
+  public boolean isEnabled()
+  {
+    return enabled;
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    JavaScriptConfig that = (JavaScriptConfig) o;
+
+    return enabled == that.enabled;
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return (enabled ? 1 : 0);
+  }
+
+  @Override
+  public String toString()
+  {
+    return "JavaScriptConfig{" +
+           "enabled=" + enabled +
+           '}';
+  }
+
+  public static JavaScriptConfig getEnabledInstance()
+  {
+    return ENABLED_INSTANCE;
+  }
+}
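
A sketch of the guard pattern the Javadoc describes, with a hypothetical JavaScript-based component:

    import org.apache.druid.js.JavaScriptConfig;

    class JavaScriptExtractor
    {
      JavaScriptExtractor(String function, JavaScriptConfig config)
      {
        // Fail fast when JavaScript is disabled, instead of compiling the function.
        if (!config.isEnabled()) {
          throw new IllegalStateException("JavaScript is disabled");
        }
        // ... compile and use "function" here ...
      }
    }
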
diff --git a/api/src/main/java/org/apache/druid/query/SegmentDescriptor.java b/api/src/main/java/org/apache/druid/query/SegmentDescriptor.java
new file mode 100644
index 00000000000..6932dd19df3
--- /dev/null
+++ b/api/src/main/java/org/apache/druid/query/SegmentDescriptor.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.query;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.joda.time.Interval;
+
+/**
+ */
+public class SegmentDescriptor
+{
+  private final Interval interval;
+  private final String version;
+  private final int partitionNumber;
+
+  @JsonCreator
+  public SegmentDescriptor(
+      @JsonProperty("itvl") Interval interval,
+      @JsonProperty("ver") String version,
+      @JsonProperty("part") int partitionNumber
+  )
+  {
+    this.interval = interval;
+    this.version = version;
+    this.partitionNumber = partitionNumber;
+  }
+
+  @JsonProperty("itvl")
+  public Interval getInterval()
+  {
+    return interval;
+  }
+
+  @JsonProperty("ver")
+  public String getVersion()
+  {
+    return version;
+  }
+
+  @JsonProperty("part")
+  public int getPartitionNumber()
+  {
+    return partitionNumber;
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    SegmentDescriptor that = (SegmentDescriptor) o;
+
+    if (partitionNumber != that.partitionNumber) {
+      return false;
+    }
+    if (interval != null ? !interval.equals(that.interval) : that.interval != null) {
+      return false;
+    }
+    if (version != null ? !version.equals(that.version) : that.version != null) {
+      return false;

  (This diff was longer than 20,000 lines, and has been truncated...)


 
