You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@druid.apache.org by GitBox <gi...@apache.org> on 2018/11/10 15:16:41 UTC

[GitHub] zhaomoran closed pull request #6598: 0.12.2

zhaomoran closed pull request #6598: 0.12.2
URL: https://github.com/apache/incubator-druid/pull/6598
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/.idea/inspectionProfiles/Druid.xml b/.idea/inspectionProfiles/Druid.xml
index c48e735b20d..4ada21411dc 100644
--- a/.idea/inspectionProfiles/Druid.xml
+++ b/.idea/inspectionProfiles/Druid.xml
@@ -9,26 +9,37 @@
     <inspection_tool class="ArrayEquals" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ArrayHashCode" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ArrayObjectsEquals" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="ArraysAsListWithZeroOrOneArgument" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="AssertWithSideEffects" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="CapturingCleaner" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CastConflictsWithInstanceof" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CastToIncompatibleInterface" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="CatchMayIgnoreException" enabled="true" level="WARNING" enabled_by_default="true">
+      <option name="m_ignoreCatchBlocksWithComments" value="false" />
+    </inspection_tool>
     <inspection_tool class="CheckValidXmlInScriptTagBody" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="ClassGetClass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ClassNewInstance" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="CollectionAddedToSelf" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ComparableImplementedButEqualsNotOverridden" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="ComparatorMethodParameterNotUsed" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="ComparatorResultComparison" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CompareToUsesNonFinalVariable" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ConstantAssertCondition" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="Contract" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="CopyConstructorMissesField" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CovariantEquals" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EmptyInitializer" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EmptyStatementBody" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="m_reportEmptyBlocks" value="true" />
       <option name="commentsAreContent" value="true" />
     </inspection_tool>
+    <inspection_tool class="EndlessStream" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EqualsAndHashcode" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="EqualsBetweenInconvertibleTypes" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="EqualsOnSuspiciousObject" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EqualsUsesNonFinalVariable" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="EqualsWhichDoesntCheckParameterClass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EqualsWithItself" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="FieldCanBeLocal" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="EXCLUDE_ANNOS">
@@ -59,16 +70,26 @@
     <inspection_tool class="InvalidComparatorMethodReference" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="IteratorHasNextCallsIteratorNext" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="IteratorNextDoesNotThrowNoSuchElementException" enabled="true" level="WARNING" enabled_by_default="true" />
-    <inspection_tool class="JavadocReference" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="JsonStandardCompliance" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="MalformedFormatString" enabled="true" level="ERROR" enabled_by_default="true">
-      <option name="additionalClasses" value="io.druid.java.util.common.StringUtils,io.druid.java.util.common.logger.Logger" />
+      <option name="additionalClasses" value="org.apache.druid.java.util.common.StringUtils,org.apache.druid.java.util.common.logger.Logger" />
       <option name="additionalMethods" value="trace,debug,info,warn,error,wtf,format,nonStrictFormat" />
     </inspection_tool>
     <inspection_tool class="MalformedRegex" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="MathRandomCastToInt" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="MavenModelInspection" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="MismatchedArrayReadWrite" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="MismatchedCollectionQueryUpdate" enabled="true" level="ERROR" enabled_by_default="true">
+      <option name="queryNames">
+        <value />
+      </option>
+      <option name="updateNames">
+        <value />
+      </option>
+      <option name="ignoredClasses">
+        <value />
+      </option>
+    </inspection_tool>
     <inspection_tool class="MismatchedStringBuilderQueryUpdate" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="MissingOverrideAnnotation" enabled="true" level="WARNING" enabled_by_default="true">
       <scope name="NonGeneratedFiles" level="ERROR" enabled="true">
@@ -90,7 +111,10 @@
     </inspection_tool>
     <inspection_tool class="ObjectEqualsNull" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ObjectToString" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="OverwrittenKey" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="PrimitiveArrayArgumentToVariableArgMethod" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="RedundantThrows" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="RedundantTypeArguments" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ReflectionForUnavailableAnnotation" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ReplaceAllDot" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ResultOfObjectAllocationIgnored" enabled="true" level="WARNING" enabled_by_default="true">
@@ -103,21 +127,112 @@
       <option name="m_reportCollectionMethods" value="true" />
       <option name="m_ignorePrivateMethods" value="false" />
     </inspection_tool>
+    <inspection_tool class="SSBasedInspection" enabled="true" level="ERROR" enabled_by_default="true">
+      <searchConfiguration name="Suboptimal IndexedInts iteration" text="$x$ &lt; $y$.size()" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="x" within="" contains="" />
+        <constraint name="y" nameOfExprType="IndexedInts" expressionTypes="IndexedInts" exprTypeWithinHierarchy="true" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Lists.newArrayList() with a single argument. Use Collections.singletonList() instead" created="1532737126203" text="Lists.newArrayList($x$)" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="x" nameOfExprType="java\.lang\.Iterable|java\.util\.Iterator|Object\[\]" expressionTypes="java.lang.Iterable|java.util.Iterator|Object[]" exprTypeWithinHierarchy="true" negateName="true" negateExprType="true" within="" contains="" />
+        <constraint name="__context__" target="true" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Math.abs(rnd.nextInt()) doesn't guarantee positive result. Use nextInt() &amp; Integer.MAX_VALUE or nextInt(Integer.MAX_VALUE)" created="1535067616084" text="$Math$.abs($x$.nextInt())" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="x" nameOfFormalType="java\.util\.Random" exceptedTypes="java.util.Random" exprTypeWithinHierarchy="true" formalTypeWithinHierarchy="true" within="" contains="" />
+        <constraint name="Math" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Math.abs(rnd.nextLong()) doesn't guarantee positive result. Use nextLong() &amp; Long.MAX_VALUE" created="1535067616084" text="$Math$.abs($x$.nextLong())" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="x" nameOfFormalType="java\.util\.Random" exceptedTypes="java.util.Random" exprTypeWithinHierarchy="true" formalTypeWithinHierarchy="true" within="" contains="" />
+        <constraint name="Math" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use nextInt(bound) instead" created="1535068047572" text="$x$.nextInt() % $a$" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="x" nameOfFormalType="java\.util\.Random" exceptedTypes="java.util.Random" exprTypeWithinHierarchy="true" formalTypeWithinHierarchy="true" within="" contains="" />
+        <constraint name="a" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use RE (a Druid's class)" created="1539352150701" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$))" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.lang\.RuntimeException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use RE (a Druid's class) with cause" created="1539353059868" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$),$y$)" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.lang\.RuntimeException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+        <constraint name="y" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use ISE (a Druid's class)" created="1539353519594" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$))" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.lang\.IllegalStateException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use ISE (a Druid's class) with cause" created="1539353595734" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$),$y$)" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.lang\.IllegalStateException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+        <constraint name="y" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use IAE (a Druid's class)" created="1539353691746" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$))" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.lang\.IllegalArgumentException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use IAE (a Druid's class) with cause" created="1539353766336" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$),$y$)" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.lang\.IllegalArgumentException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+        <constraint name="y" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use IOE (a Druid's class)" created="1539353913074" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$))" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.io\.IOException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use IOE (a Druid's class) with cause" created="1539354009031" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$),$y$)" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.io\.IOException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+        <constraint name="y" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use UOE (a Druid's class)" created="1539354091201" text="new $E$(org.apache.druid.java.util.common.StringUtils.format($x$))" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" regexp="java\.lang\.UnsupportedOperationException" within="" contains="" />
+        <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use TypeReference&lt;List&lt;...&gt;&gt; instead" created="1539884261626" text="TypeReference&lt;ArrayList&lt;$E$&gt;&gt;" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use TypeReference&lt;Map&lt;...&gt;&gt; instead" created="1539884261626" text="TypeReference&lt;HashMap&lt;$K$, $V$&gt;&gt;" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="K" within="" contains="" />
+        <constraint name="V" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use TypeReference&lt;Set&lt;...&gt;&gt; instead" created="1539884261626" text="TypeReference&lt;HashSet&lt;$E$&gt;&gt;" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" within="" contains="" />
+      </searchConfiguration>
+    </inspection_tool>
     <inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
       <option name="processCode" value="true" />
       <option name="processLiterals" value="true" />
       <option name="processComments" value="true" />
     </inspection_tool>
-    <inspection_tool class="StaticCallOnSubclass" enabled="true" level="WARNING" enabled_by_default="true" />
-    <inspection_tool class="StaticFieldReferenceOnSubclass" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="StaticCallOnSubclass" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="StaticFieldReferenceOnSubclass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationInFormatCall" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationInMessageFormatCall" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationMissingWhitespace" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="StringEquality" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="StringEqualsCharSequence" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringTokenizerDelimiter" enabled="true" level="ERROR" enabled_by_default="true" />
-    <inspection_tool class="SubtractionInCompareTo" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="SubtractionInCompareTo" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="SuspiciousArrayCast" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="SuspiciousArrayMethodCall" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="SuspiciousIndentAfterControlStatement" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="SuspiciousListRemoveInLoop" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="SuspiciousMethodCalls" enabled="true" level="ERROR" enabled_by_default="true">
       <option name="REPORT_CONVERTIBLE_METHOD_CALLS" value="true" />
     </inspection_tool>
@@ -133,6 +248,9 @@
     <inspection_tool class="SyntaxError" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="TextLabelInSwitchStatement" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ThrowableNotThrown" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="ToArrayCallWithZeroLengthArrayArgument" enabled="true" level="WARNING" enabled_by_default="true">
+      <option name="myMode" value="BY_LEVEL" />
+    </inspection_tool>
     <inspection_tool class="UnnecessaryEnumModifier" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="UnnecessaryFullyQualifiedName" enabled="true" level="WARNING" enabled_by_default="true">
       <scope name="NonGeneratedFiles" level="ERROR" enabled="true">
@@ -143,6 +261,11 @@
       <option name="ignoreInModuleStatements" value="true" />
     </inspection_tool>
     <inspection_tool class="UnnecessaryInterfaceModifier" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="UnusedAssignment" enabled="true" level="ERROR" enabled_by_default="true">
+      <option name="REPORT_PREFIX_EXPRESSIONS" value="true" />
+      <option name="REPORT_POSTFIX_EXPRESSIONS" value="true" />
+      <option name="REPORT_REDUNDANT_INITIALIZER" value="true" />
+    </inspection_tool>
     <inspection_tool class="UnusedCatchParameter" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="m_ignoreCatchBlocksWithComments" value="false" />
       <option name="m_ignoreTestCases" value="false" />
diff --git a/.idea/misc.xml b/.idea/misc.xml
index a84c344e393..9d0b0220e00 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -1,26 +1,34 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
   <component name="EntryPointsManager">
-    <list size="12">
+    <list size="14">
       <item index="0" class="java.lang.String" itemvalue="com.fasterxml.jackson.annotation.JsonCreator" />
       <item index="1" class="java.lang.String" itemvalue="com.fasterxml.jackson.annotation.JsonProperty" />
       <item index="2" class="java.lang.String" itemvalue="com.fasterxml.jackson.annotation.JsonValue" />
       <item index="3" class="java.lang.String" itemvalue="com.google.inject.Inject" />
       <item index="4" class="java.lang.String" itemvalue="com.google.inject.Provides" />
-      <item index="5" class="java.lang.String" itemvalue="io.druid.annotations.UsedInGeneratedCode" />
-      <item index="6" class="java.lang.String" itemvalue="io.druid.guice.annotations.ExtensionPoint" />
-      <item index="7" class="java.lang.String" itemvalue="io.druid.guice.annotations.PublicApi" />
-      <item index="8" class="java.lang.String" itemvalue="io.druid.java.util.common.lifecycle.LifecycleStart" />
-      <item index="9" class="java.lang.String" itemvalue="io.druid.java.util.common.lifecycle.LifecycleStop" />
-      <item index="10" class="java.lang.String" itemvalue="javax.inject.Inject" />
-      <item index="11" class="java.lang.String" itemvalue="org.openjdk.jmh.annotations.Benchmark" />
+      <item index="5" class="java.lang.String" itemvalue="io.airlift.airline.Command" />
+      <item index="6" class="java.lang.String" itemvalue="org.apache.druid.annotations.UsedByJUnitParamsRunner" />
+      <item index="7" class="java.lang.String" itemvalue="org.apache.druid.annotations.UsedInGeneratedCode" />
+      <item index="8" class="java.lang.String" itemvalue="org.apache.druid.guice.annotations.ExtensionPoint" />
+      <item index="9" class="java.lang.String" itemvalue="org.apache.druid.guice.annotations.PublicApi" />
+      <item index="10" class="java.lang.String" itemvalue="org.apache.druid.java.util.common.lifecycle.LifecycleStart" />
+      <item index="11" class="java.lang.String" itemvalue="org.apache.druid.java.util.common.lifecycle.LifecycleStop" />
+      <item index="12" class="java.lang.String" itemvalue="javax.inject.Inject" />
+      <item index="13" class="java.lang.String" itemvalue="org.openjdk.jmh.annotations.Benchmark" />
     </list>
+    <pattern value="org.apache.druid.cli.GuiceRunnable" hierarchically="true" method="run" />
+    <pattern value="org.apache.druid.cli.GuiceRunnable" hierarchically="true" />
+    <pattern value="org.apache.druid.initialization.DruidModule" hierarchically="true" method="getJacksonModules" />
     <writeAnnotations>
       <writeAnnotation name="com.fasterxml.jackson.annotation.JacksonInject" />
       <writeAnnotation name="com.fasterxml.jackson.annotation.JsonProperty" />
       <writeAnnotation name="com.google.caliper.Param" />
       <writeAnnotation name="io.airlift.airline.Option" />
+      <writeAnnotation name="org.easymock.Mock" />
+      <writeAnnotation name="org.mockito.Mock" />
       <writeAnnotation name="org.openjdk.jmh.annotations.Param" />
+      <writeAnnotation name="org.powermock.api.easymock.annotation.Mock" />
     </writeAnnotations>
   </component>
   <component name="MavenProjectsManager">
@@ -32,26 +40,35 @@
   </component>
   <component name="NullableNotNullManager">
     <option name="myDefaultNullable" value="javax.annotation.Nullable" />
-    <option name="myDefaultNotNull" value="org.jetbrains.annotations.NotNull" />
+    <option name="myDefaultNotNull" value="javax.annotation.Nonnull" />
     <option name="myNullables">
       <value>
-        <list size="6">
+        <list size="10">
           <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.Nullable" />
           <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nullable" />
           <item index="2" class="java.lang.String" itemvalue="javax.annotation.CheckForNull" />
           <item index="3" class="java.lang.String" itemvalue="org.springframework.lang.Nullable" />
           <item index="4" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.Nullable" />
           <item index="5" class="java.lang.String" itemvalue="android.support.annotation.Nullable" />
+          <item index="6" class="java.lang.String" itemvalue="androidx.annotation.Nullable" />
+          <item index="7" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.qual.Nullable" />
+          <item index="8" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.compatqual.NullableDecl" />
+          <item index="9" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.compatqual.NullableType" />
         </list>
       </value>
     </option>
     <option name="myNotNulls">
       <value>
-        <list size="4">
+        <list size="9">
           <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.NotNull" />
           <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nonnull" />
           <item index="2" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.NonNull" />
           <item index="3" class="java.lang.String" itemvalue="android.support.annotation.NonNull" />
+          <item index="4" class="java.lang.String" itemvalue="javax.validation.constraints.NotNull" />
+          <item index="5" class="java.lang.String" itemvalue="androidx.annotation.NonNull" />
+          <item index="6" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.qual.NonNull" />
+          <item index="7" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.compatqual.NonNullDecl" />
+          <item index="8" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.compatqual.NonNullType" />
         </list>
       </value>
     </option>
diff --git a/.idea/scopes/NonGeneratedFiles.xml b/.idea/scopes/NonGeneratedFiles.xml
index 22375373e96..5bd4a87fc46 100644
--- a/.idea/scopes/NonGeneratedFiles.xml
+++ b/.idea/scopes/NonGeneratedFiles.xml
@@ -1,3 +1,3 @@
 <component name="DependencyValidationManager">
-  <scope name="NonGeneratedFiles" pattern="(src:*..*||test:*..*)&amp;&amp;!test[druid-protobuf-extensions]:io.druid.data.input.protobuf.ProtoTestEventWrapper" />
+  <scope name="NonGeneratedFiles" pattern="(src:*..*||test:*..*)&amp;&amp;!test[druid-protobuf-extensions]:org.apache.druid.data.input.protobuf.ProtoTestEventWrapper" />
 </component>
\ No newline at end of file
diff --git a/.idea/scopes/UnusedInspectionsScope.xml b/.idea/scopes/UnusedInspectionsScope.xml
index 1cc1836884b..1688ededd85 100644
--- a/.idea/scopes/UnusedInspectionsScope.xml
+++ b/.idea/scopes/UnusedInspectionsScope.xml
@@ -1,3 +1,7 @@
 <component name="DependencyValidationManager">
   <scope name="UnusedInspectionsScope" pattern="src[druid-processing]:*..*" />
+  <scope name="UnusedInspectionsScope" pattern="src[java-util]:*..*" />
+  <scope name="UnusedInspectionsScope" pattern="src[druid-common]:*..*" />
+  <scope name="UnusedInspectionsScope" pattern="src[extendedset]:*..*" />
+  <scope name="UnusedInspectionsScope" pattern="src[druid-indexing-service]:*..*" />
 </component>
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 4fb64a7bf9a..63c2495c577 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,8 +2,8 @@ language: java
 
 # On 12-12-2017, Travis updated their trusty image, which caused integration tests to fail.
 # The group: config instructs Travis to use the previous trusty image.
-# Please see https://github.com/druid-io/druid/pull/5155 for more information.
-sudo: required
+# Please see https://github.com/apache/incubator-druid/pull/5155 for more information.
+sudo: false
 dist: trusty
 group: deprecated-2017Q4
 
@@ -16,49 +16,91 @@ cache:
 
 matrix:
   include:
+      # license checks
+    - env:
+       - NAME="license checks"
+      install: true
+      script: MAVEN_OPTS='-Xmx3000m' mvn clean verify -Prat -DskipTests -B -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
+
       # strict compilation
-    - sudo: false
-      env:
+    - env:
         - NAME="strict compilation"
       install: true
       # Strict compilation requires more than 2 GB
-      script: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn clean -Pstrict -pl '!benchmarks' compile test-compile -B
+      script: MAVEN_OPTS='-Xmx3000m' mvn clean -Pstrict -pl '!benchmarks' compile test-compile -B --fail-at-end
 
       # processing module test
-    - sudo: false
-      env:
+    - env:
         - NAME="processing module test"
-      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
-      before_script:
-        - unset _JAVA_OPTIONS
-      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl processing
+      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
+      before_script: unset _JAVA_OPTIONS
+      script:
+        # Set MAVEN_OPTS for Surefire launcher
+        - MAVEN_OPTS='-Xmx512m' mvn test -B -pl processing
+        - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
+        - free -m
+
+      # processing module tests with SQL Compatibility enabled
+    - env:
+        - NAME="processing module test with SQL Compatibility"
+      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
+      before_script: unset _JAVA_OPTIONS
+      script:
+        # Set MAVEN_OPTS for Surefire launcher
+        - MAVEN_OPTS='-Xmx512m' mvn test -B -Ddruid.generic.useDefaultValueForNull=false -pl processing
+        - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
+        - free -m
 
       # server module test
-    - sudo: false
-      env:
+    - env:
         - NAME="server module test"
-      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
-      before_script:
-        - unset _JAVA_OPTIONS
-      # Server module test is run without the parallel-test option because it's memory sensitive and often fails with that option.
-      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -pl server
+      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
+      before_script: unset _JAVA_OPTIONS
+      script:
+        # Set MAVEN_OPTS for Surefire launcher
+        - MAVEN_OPTS='-Xmx512m' mvn test -B -pl server
+
+      # server module test with SQL Compatibility enabled
+    - env:
+        - NAME="server module test with SQL Compatibility enabled"
+      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
+      before_script: unset _JAVA_OPTIONS
+      script:
+        # Set MAVEN_OPTS for Surefire launcher
+        - MAVEN_OPTS='-Xmx512m' mvn test -B -pl server -Ddruid.generic.useDefaultValueForNull=false
+
 
       # other modules test
-    - sudo: false
-      env:
+    - env:
         - NAME="other modules test"
-      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
-      before_script:
-        - unset _JAVA_OPTIONS
-      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl '!processing,!server'
+        - AWS_REGION=us-east-1 # set an AWS region for unit tests
+      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
+      before_script: unset _JAVA_OPTIONS
+      script:
+        # Set MAVEN_OPTS for Surefire launcher
+        - MAVEN_OPTS='-Xmx512m' mvn test -B -pl '!processing,!server'
+        - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
+        - free -m
+
+      # other modules test with SQL Compatibility enabled
+    - env:
+        - NAME="other modules test with SQL Compatibility"
+        - AWS_REGION=us-east-1 # set an AWS region for unit tests
+      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
+      before_script: unset _JAVA_OPTIONS
+      script:
+        # Set MAVEN_OPTS for Surefire launcher
+        - MAVEN_OPTS='-Xmx512m' mvn test -B -Ddruid.generic.useDefaultValueForNull=false -pl '!processing,!server'
+        - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
+        - free -m
 
       # run integration tests
     - sudo: required
       services:
         - docker
       env:
-        - NAME="integration test"
-        - DOCKER_IP=172.17.0.1
+        - NAME="integration test part 1"
+        - DOCKER_IP=127.0.0.1
       install:
        # Only errors will be shown with the -q option. This is to avoid generating too many logs, which would make the Travis build fail.
         - mvn install -q -ff -DskipTests -B
@@ -72,3 +114,24 @@ matrix:
           echo $v dmesg ======================== ;
           docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
           done
+
+      # run integration tests
+    - sudo: required
+      services:
+        - docker
+      env:
+        - NAME="integration test part 2"
+        - DOCKER_IP=127.0.0.1
+      install:
+        # Only errors will be shown with the -q option. This is to avoid generating too many logs which make travis build failed.
+        - mvn install -q -ff -DskipTests -B
+      script:
+        - $TRAVIS_BUILD_DIR/ci/travis_script_integration_part2.sh
+      after_failure:
+        - for v in ~/shared/logs/*.log ; do
+          echo $v logtail ======================== ; tail -100 $v ;
+          done
+        - for v in broker middlemanager overlord router coordinator historical ; do
+          echo $v dmesg ======================== ;
+          docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
+          done
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 0c3daead4c4..59a4ad0512c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -4,15 +4,15 @@ When submitting a pull request (PR), please use the following guidelines:
 
 - Make sure your code respects existing formatting conventions. In general, follow
   the same coding style as the code that you are modifying.
-- For Intellij you can import our code style settings xml: [druid_intellij_formatting.xml](https://github.com/druid-io/druid/raw/master/druid_intellij_formatting.xml).
-- For Eclipse you can import our code style settings xml: [eclipse_formatting.xml](https://github.com/druid-io/druid/raw/master/eclipse_formatting.xml).
+- For Intellij you can import our code style settings xml: [druid_intellij_formatting.xml](https://github.com/apache/incubator-druid/raw/master/druid_intellij_formatting.xml).
+- For Eclipse you can import our code style settings xml: [eclipse_formatting.xml](https://github.com/apache/incubator-druid/raw/master/eclipse_formatting.xml).
 - Do add/update documentation appropriately for the change you are making.
-- If you are introducing a new feature you may want to first submit your idea
-  for feedback to the [mailing list](mailto:druid-development@googlegroups.com).
+- If you are introducing a new feature you may want to first write about your idea
+  for feedback to [dev@druid.apache.org](https://lists.apache.org/list.html?dev@druid.apache.org).
   Non-trivial features should include unit tests covering the new functionality.
 - Bugfixes should include a unit test or integration test reproducing the issue.
 - Do not use author tags/information in the code.
-- Always include license header on each java file your create. See [this example](https://github.com/druid-io/druid/blob/master/common/src/main/java/io/druid/metadata/PasswordProvider.java)
+- Always include license header on each java file your create. See [this example](https://github.com/apache/incubator-druid/blob/master/core/src/main/java/org/apache/druid/metadata/PasswordProvider.java)
 - Try to keep pull requests short and submit separate ones for unrelated
   features, but feel free to combine simple bugfixes/tests into one pull request.
 - Keep the number of commits small and combine commits for related changes.
@@ -22,9 +22,9 @@ When submitting a pull request (PR), please use the following guidelines:
 
 ## GitHub Workflow
 
-1. Fork the druid-io/druid repository into your GitHub account
+1. Fork the apache/incubator-druid repository into your GitHub account
 
-    https://github.com/druid-io/druid/fork
+    https://github.com/apache/incubator-druid/fork
 
 1. Clone your fork of the GitHub repository
 
@@ -37,7 +37,7 @@ When submitting a pull request (PR), please use the following guidelines:
 1. Add a remote to keep up with upstream changes
 
     ```
-    git remote add upstream https://github.com/druid-io/druid.git
+    git remote add upstream https://github.com/apache/incubator-druid.git
     ```
 
     If you already have a copy, fetch upstream changes
@@ -84,7 +84,7 @@ When submitting a pull request (PR), please use the following guidelines:
     If you recently pushed your changes GitHub will automatically pop up a
     `Compare & pull request` button for any branches you recently pushed to. If you
     click that button it will automatically offer you to submit your pull-request
-    to the druid-io/druid repository.
+    to the apache/incubator-druid repository.
 
     - Give your pull-request a meaningful title.
     - In the description, explain your changes and the problem they are solving.
diff --git a/DISCLAIMER b/DISCLAIMER
new file mode 100644
index 00000000000..f10a1108b57
--- /dev/null
+++ b/DISCLAIMER
@@ -0,0 +1,6 @@
+Apache Druid is an effort undergoing incubation at The Apache Software Foundation (ASF),
+sponsored by the Apache Incubator. Incubation is required of all newly accepted projects
+until a further review indicates that the infrastructure, communications, and decision
+making process have stabilized in a manner consistent with other successful ASF projects.
+While incubation status is not necessarily a reflection of the completeness or stability
+of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
diff --git a/DruidCorporateCLA.pdf b/DruidCorporateCLA.pdf
deleted file mode 100644
index d3f24b3d79f..00000000000
Binary files a/DruidCorporateCLA.pdf and /dev/null differ
diff --git a/DruidIndividualCLA.pdf b/DruidIndividualCLA.pdf
deleted file mode 100644
index eee4c72eb89..00000000000
Binary files a/DruidIndividualCLA.pdf and /dev/null differ
diff --git a/INTELLIJ_SETUP.md b/INTELLIJ_SETUP.md
index fac77fb5e1b..8ee3c4dfb1d 100644
--- a/INTELLIJ_SETUP.md
+++ b/INTELLIJ_SETUP.md
@@ -33,8 +33,8 @@ You can configure application definitions in XML for import into IntelliJ. Below
 <component name="ProjectRunConfigurationManager">
   <configuration default="false" name="Historical" type="Application" factoryName="Application">
     <extension name="coverage" enabled="false" merge="false" sample_coverage="true" runner="idea" />
-    <option name="MAIN_CLASS_NAME" value="io.druid.cli.Main" />
-    <option name="VM_PARAMETERS" value="-server -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Xmx2G -XX:MaxJavaStackTraceDepth=9999 -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintAdaptiveSizePolicy -XX:+PrintReferenceGC -verbose:gc -XX:+PrintFlagsFinal -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dorg.jboss.logging.provider=slf4j -Dlog4j.configurationFile=$PROJECT_DIR$/common/src/main/resources/log4j2.debug.xml -Ddruid.host=localhost -Ddruid.service=historical -Ddruid.server.maxSize=10000000000 -Ddruid.processing.buffer.sizeBytes=100000000 -Ddruid.extensions.hadoopDependenciesDir=$PROJECT_DIR$/distribution/target/hadoop-dependencies/ -Ddruid.extensions.directory=$PROJECT_DIR$/distribution/target/extensions/ -Ddruid.extensions.loadList=[\&quot;druid-s3-extensions\&quot;,\&quot;druid-histogram\&quot;,\&quot;mysql-metadata-storage\&quot;] -Ddruid.historical.cache.useCache=false -Ddruid.historical.cache.populateCache=false -Ddruid.segmentCache.locations=&quot;[{\&quot;path\&quot;:\&quot;/tmp/druid/indexCache\&quot;,\&quot;maxSize\&quot;:10000000000}]&quot; -Ddruid.zk.service.host=localhost -Ddruid.processing.numThreads=1 -Ddruid.server.http.numThreads=50 -Ddruid.announcer.type=batch -Ddruid.emitter=logging" />
+    <option name="MAIN_CLASS_NAME" value="org.apache.druid.cli.Main" />
+    <option name="VM_PARAMETERS" value="-server -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Xmx2G -XX:MaxJavaStackTraceDepth=9999 -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintAdaptiveSizePolicy -XX:+PrintReferenceGC -verbose:gc -XX:+PrintFlagsFinal -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dorg.jboss.logging.provider=slf4j -Dlog4j.configurationFile=$PROJECT_DIR$/core/src/main/resources/log4j2.debug.xml -Ddruid.host=localhost -Ddruid.service=historical -Ddruid.server.maxSize=10000000000 -Ddruid.processing.buffer.sizeBytes=100000000 -Ddruid.extensions.hadoopDependenciesDir=$PROJECT_DIR$/distribution/target/hadoop-dependencies/ -Ddruid.extensions.directory=$PROJECT_DIR$/distribution/target/extensions/ -Ddruid.extensions.loadList=[\&quot;druid-s3-extensions\&quot;,\&quot;druid-histogram\&quot;,\&quot;mysql-metadata-storage\&quot;] -Ddruid.historical.cache.useCache=false -Ddruid.historical.cache.populateCache=false -Ddruid.segmentCache.locations=&quot;[{\&quot;path\&quot;:\&quot;/tmp/druid/indexCache\&quot;,\&quot;maxSize\&quot;:10000000000}]&quot; -Ddruid.zk.service.host=localhost -Ddruid.processing.numThreads=1 -Ddruid.server.http.numThreads=50 -Ddruid.announcer.type=batch -Ddruid.emitter=logging" />
     <option name="PROGRAM_PARAMETERS" value="server historical" />
     <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$" />
     <option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
@@ -54,7 +54,7 @@ You can configure application definitions in XML for import into IntelliJ. Below
 <component name="ProjectRunConfigurationManager">
   <configuration default="false" name="Coordinator" type="Application" factoryName="Application">
     <extension name="coverage" enabled="false" merge="false" sample_coverage="true" runner="idea" />
-    <option name="MAIN_CLASS_NAME" value="io.druid.cli.Main" />
+    <option name="MAIN_CLASS_NAME" value="org.apache.druid.cli.Main" />
     <option name="VM_PARAMETERS" value="-server -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Xmx256M -Xmx256M -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintAdaptiveSizePolicy -XX:+PrintReferenceGC -verbose:gc -XX:+PrintFlagsFinal -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dorg.jboss.logging.provider=slf4j -Ddruid.host=localhost -Ddruid.service=coordinator -Ddruid.extensions.directory=$PROJECT_DIR$/distribution/target/extensions/ -Ddruid.extensions.loadList=[\&quot;druid-s3-extensions\&quot;,\&quot;druid-histogram\&quot;,\&quot;mysql-metadata-storage\&quot;] -Ddruid.zk.service.host=localhost -Ddruid.metadata.storage.type=mysql -Ddruid.metadata.storage.connector.connectURI=&quot;jdbc:mysql://localhost:3306/druid&quot; -Ddruid.metadata.storage.connector.user=druid -Ddruid.metadata.storage.connector.password=diurd -Ddruid.announcer.type=batch -Ddruid.emitter=logging -Ddruid.coordinator.period=PT10S -Ddruid.coordinator.startDelay=PT5S" />
     <option name="PROGRAM_PARAMETERS" value="server coordinator" />
     <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$" />
diff --git a/NOTICE b/NOTICE
index 71a02dd4c63..8207daca8c9 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,7 +1,8 @@
-Druid - a distributed column store.
-Copyright 2012-2016 Metamarkets Group Inc.
-Copyright 2015-2016 Yahoo! Inc.
-Copyright 2015-2016 Imply Data, Inc.
+Apache Druid (incubating)
+Copyright 2018 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
 
 -------------------------------------------------------------------------------
 
@@ -11,7 +12,7 @@ This product contains a modified version of Andrew Duffy's java-alphanum library
   * HOMEPAGE:
     * https://github.com/amjjd/java-alphanum
 
-This product contains conjunctive normal form conversion code and a variance aggregator algorithm adapted from Apache Hive
+This product contains conjunctive normal form conversion code, a variance aggregator algorithm, and bloom filter adapted from Apache Hive
   * LICENSE:
     * https://github.com/apache/hive/blob/branch-2.0/LICENSE (Apache License, Version 2.0)
   * HOMEPAGE:
@@ -82,4 +83,12 @@ This product contains code adapted from Apache Hadoop
   * LICENSE:
     * https://github.com/apache/hadoop/blob/trunk/LICENSE.txt (Apache License, Version 2.0)
   * HOMEPAGE:
-    * http://hadoop.apache.org/
\ No newline at end of file
+    * http://hadoop.apache.org/
+
+This product contains modified versions of the Dockerfile and related configuration files from SequenceIQ's Hadoop Docker image:
+  * LICENSE:
+    * https://github.com/sequenceiq/hadoop-docker/blob/master/LICENSE (Apache License, Version 2.0)
+  * HOMEPAGE:
+    * https://github.com/sequenceiq/hadoop-docker/
+  * COMMIT TAG:
+    * update this when this patch is committed
diff --git a/README.md b/README.md
index 8dfb8af2ef3..b4f728d5c67 100644
--- a/README.md
+++ b/README.md
@@ -1,17 +1,10 @@
-[![Build Status](https://travis-ci.org/druid-io/druid.svg?branch=master)](https://travis-ci.org/druid-io/druid) [![Inspections Status](https://img.shields.io/teamcity/http/teamcity.jetbrains.com/s/OpenSourceProjects_Druid_Inspections.svg?label=TeamCity%20inspections)](https://teamcity.jetbrains.com/viewType.html?buildTypeId=OpenSourceProjects_Druid_Inspections) [![Coverage Status](https://coveralls.io/repos/druid-io/druid/badge.svg?branch=master)](https://coveralls.io/r/druid-io/druid?branch=master)
+[![Build Status](https://travis-ci.org/apache/incubator-druid.svg?branch=master)](https://travis-ci.org/apache/incubator-druid) [![Inspections Status](https://img.shields.io/teamcity/http/teamcity.jetbrains.com/s/OpenSourceProjects_Druid_Inspections.svg?label=TeamCity%20inspections)](https://teamcity.jetbrains.com/viewType.html?buildTypeId=OpenSourceProjects_Druid_Inspections) [![Coverage Status](https://coveralls.io/repos/apache/incubator-druid/badge.svg?branch=master)](https://coveralls.io/r/apache/incubator-druid?branch=master) [![IRC#druid-dev](https://img.shields.io/badge/IRC-druid--dev-blue.svg)](https://webchat.freenode.net?channels=druid-dev)
 
-## Druid
+## Apache Druid (incubating)
 
-Druid is a distributed, column-oriented, real-time analytics data store
-that is commonly used to power exploratory dashboards in multi-tenant
-environments.
+Apache Druid (incubating) is a high performance analytics data store for event-driven data.
 
-Druid excels as a data warehousing solution for fast aggregate queries on
-petabyte sized data sets. Druid supports a variety of flexible filters, exact
-calculations, approximate algorithms, and other useful calculations.
-
-Druid can load both streaming and batch data and integrates with
-Samza, Kafka, Storm, Spark, and Hadoop.
+*Disclaimer: Apache Druid is an effort undergoing incubation at The Apache Software Foundation (ASF), sponsored by the Apache Incubator. Incubation is required of all newly accepted projects until a further review indicates that the infrastructure, communications, and decision making process have stabilized in a manner consistent with other successful ASF projects. While incubation status is not necessarily a reflection of the completeness or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.*
 
 ### License
 
@@ -35,14 +28,19 @@ You can get started with Druid with our [quickstart](http://druid.io/docs/latest
 
 ### Reporting Issues
 
-If you find any bugs, please file a [GitHub issue](https://github.com/druid-io/druid/issues).
+If you find any bugs, please file a [GitHub issue](https://github.com/apache/incubator-druid/issues).
 
 ### Community
 
-Community support is available on the [druid-user mailing
-list](https://groups.google.com/forum/#!forum/druid-user)(druid-user@googlegroups.com).
+The Druid community is in the process of migrating to Apache by way of the Apache Incubator. Eventually, as we proceed
+along this path, our site will move from http://druid.io/ to https://druid.apache.org/.
+
+Community support is available on the
+[druid-user mailing list](https://groups.google.com/forum/#!forum/druid-user)(druid-user@googlegroups.com), which
+is hosted at Google Groups.
 
-Development discussions occur on the [druid-development list](https://groups.google.com/forum/#!forum/druid-development)(druid-development@googlegroups.com).
+Development discussions occur on [dev@druid.apache.org](https://lists.apache.org/list.html?dev@druid.apache.org), which
+you can subscribe to by emailing [dev-subscribe@druid.apache.org](mailto:dev-subscribe@druid.apache.org).
 
 We also have a couple people hanging out on IRC in `#druid-dev` on
 `irc.freenode.net`.
diff --git a/api/pom.xml b/api/pom.xml
deleted file mode 100644
index d4bcddd4370..00000000000
--- a/api/pom.xml
+++ /dev/null
@@ -1,146 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to Metamarkets Group Inc. (Metamarkets) under one
- ~ or more contributor license agreements.  See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership.  Metamarkets licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License.  You may obtain a copy of the License at
- ~
- ~   http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied.  See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>druid-api</artifactId>
-    <name>druid-api</name>
-    <description>Druid Extensions API</description>
-
-    <parent>
-        <groupId>io.druid</groupId>
-        <artifactId>druid</artifactId>
-        <version>0.12.0-SNAPSHOT</version>
-    </parent>
-
-    <dependencies>
-        <dependency>
-            <groupId>io.druid</groupId>
-            <artifactId>java-util</artifactId>
-            <version>${project.parent.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.slf4j</groupId>
-                        <artifactId>slf4j-api</artifactId>
-                    </exclusion>
-                </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>com.google.inject</groupId>
-            <artifactId>guice</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.google.inject.extensions</groupId>
-            <artifactId>guice-multibindings</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>io.airlift</groupId>
-            <artifactId>airline</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-annotations</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.dataformat</groupId>
-            <artifactId>jackson-dataformat-smile</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.hibernate</groupId>
-            <artifactId>hibernate-validator</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>javax.validation</groupId>
-            <artifactId>validation-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-io</groupId>
-            <artifactId>commons-io</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.google.code.findbugs</groupId>
-            <artifactId>jsr305</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>net.thisptr</groupId>
-            <artifactId>jackson-jq</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>it.unimi.dsi</groupId>
-            <artifactId>fastutil</artifactId>
-        </dependency>
-        <!-- Tests -->
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-simple</artifactId>
-            <scope>test</scope>
-            <optional>true</optional>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-core</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-slf4j-impl</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-1.2-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-jul</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-release-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-
-</project>
diff --git a/api/src/main/java/io/druid/cli/CliCommandCreator.java b/api/src/main/java/io/druid/cli/CliCommandCreator.java
deleted file mode 100644
index ff48b7060dd..00000000000
--- a/api/src/main/java/io/druid/cli/CliCommandCreator.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.cli;
-
-import io.airlift.airline.Cli;
-import io.druid.guice.annotations.ExtensionPoint;
-
-/**
- */
-@ExtensionPoint
-public interface CliCommandCreator
-{
-  void addCommands(Cli.CliBuilder builder);
-}
diff --git a/api/src/main/java/io/druid/data/input/ByteBufferInputRowParser.java b/api/src/main/java/io/druid/data/input/ByteBufferInputRowParser.java
deleted file mode 100644
index 9f73be4dc6f..00000000000
--- a/api/src/main/java/io/druid/data/input/ByteBufferInputRowParser.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.data.input.impl.InputRowParser;
-import io.druid.data.input.impl.ParseSpec;
-
-import java.nio.ByteBuffer;
-
-public interface ByteBufferInputRowParser extends InputRowParser<ByteBuffer>
-{
-  @Override
-  ByteBufferInputRowParser withParseSpec(ParseSpec parseSpec);
-}
diff --git a/api/src/main/java/io/druid/data/input/Committer.java b/api/src/main/java/io/druid/data/input/Committer.java
deleted file mode 100644
index 3c913d46e2e..00000000000
--- a/api/src/main/java/io/druid/data/input/Committer.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-/**
- * Committer includes a Runnable and a Jackson-serialized metadata object containing the offset
- */
-@ExtensionPoint
-public interface Committer extends Runnable
-{
-  /**
-   * @return A json serialized representation of commit metadata,
-   * which needs to be serialized and deserialized by Jackson.
-   * Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types
-   */
-  Object getMetadata();
-}
diff --git a/api/src/main/java/io/druid/data/input/Firehose.java b/api/src/main/java/io/druid/data/input/Firehose.java
deleted file mode 100644
index a6f403cf355..00000000000
--- a/api/src/main/java/io/druid/data/input/Firehose.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import javax.annotation.Nullable;
-import java.io.Closeable;
-
-/**
- * This is an interface that holds onto the stream of incoming data.  Realtime data ingestion is built around this
- * abstraction.  In order to add a new type of source for realtime data ingestion, all you need to do is implement
- * one of these and register it with the Main.
- *
- * This object acts a lot like an Iterator, but it doesn't extend the Iterator interface because it extends
- * Closeable and it is very important that the close() method doesn't get forgotten, which is easy to do if this
- * gets passed around as an Iterator.
- * <p>
- * The implementation of this interface only needs to be minimally thread-safe. The three methods ##hasMore(),
- * ##nextRow() and ##commit() are all called from the same thread.  ##commit(), however, returns a callback
- * which will be called on another thread, so the operations inside of that callback must be thread-safe.
- * </p>
- */
-@ExtensionPoint
-public interface Firehose extends Closeable
-{
-  /**
-   * Returns whether there are more rows to process.  This is used to indicate that another item is immediately
-   * available via ##nextRow().  Thus, if the stream is still available but there are no new messages on it, this call
-   * should block until a new message is available.
-   *
-   * If something happens such that the stream is no longer available, this should return false.
-   *
-   * @return true if and when there is another row available, false if the stream has dried up
-   */
-  boolean hasMore();
-
-  /**
-   * The next row available.  Should only be called if hasMore returns true.
-   * The return value can be null which means the caller must skip this row.
-   *
-   * @return The next row
-   */
-  @Nullable
-  InputRow nextRow();
-
-  /**
-   * Returns a runnable that will "commit" everything read up to the point at which commit() is called.  This is
-   * often equivalent to everything that has been read since the last commit() call (or instantiation of the object),
-   * but doesn't necessarily have to be.
-   *
-   * This method is called when the main processing loop starts to persist its current batch of things to process.
-   * The returned runnable will be run when the current batch has been successfully persisted, there is usually
-   * some time lag between when this method is called and when the runnable is run.  The Runnable is also run on
-   * a separate thread so its operation should be thread-safe.
-   *
-   * The Runnable is essentially just a lambda/closure that is run() after data supplied by this instance has
-   * been committed on the writer side of this interface protocol.
-   * <p>
-   * A simple implementation of this interface might do nothing when run() is called 
-   * (in which case the same do-nothing instance can be returned every time), or 
-   * a more complex implementation might clean up temporary resources that are no longer needed 
-   * because of InputRows delivered by prior calls to ##nextRow().
-   * </p>
-   */
-  Runnable commit();
-}
diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactory.java b/api/src/main/java/io/druid/data/input/FirehoseFactory.java
deleted file mode 100644
index c68ad6be9a2..00000000000
--- a/api/src/main/java/io/druid/data/input/FirehoseFactory.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.data.input.impl.InputRowParser;
-import io.druid.data.input.impl.prefetch.PrefetchableTextFilesFirehoseFactory;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.parsers.ParseException;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * FirehoseFactory creates a {@link Firehose} which is an interface holding onto the stream of incoming data.
- * It currently provides two methods for creating a {@link Firehose} and their default implementations call each other
- * for the backward compatibility.  Implementations of this interface must implement one of these methods.
- */
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-public interface FirehoseFactory<T extends InputRowParser>
-{
-  /**
-   * Initialization method that connects up the fire hose.  If this method returns successfully it should be safe to
-   * call hasMore() on the returned Firehose (which might subsequently block).
-   * <p/>
-   * If this method returns null, then any attempt to call hasMore(), nextRow(), commit() and close() on the return
-   * value will throw a surprising NPE.   Throwing IOException on connection failure or runtime exception on
-   * invalid configuration is preferred over returning null.
-   *
-   * @param parser             an input row parser
-   */
-  @Deprecated
-  default Firehose connect(T parser) throws IOException, ParseException
-  {
-    return connect(parser, null);
-  }
-
-  /**
-   * Initialization method that connects up the fire hose.  If this method returns successfully it should be safe to
-   * call hasMore() on the returned Firehose (which might subsequently block).
-   * <p/>
-   * If this method returns null, then any attempt to call hasMore(), nextRow(), commit() and close() on the return
-   * value will throw a surprising NPE.   Throwing IOException on connection failure or runtime exception on
-   * invalid configuration is preferred over returning null.
-   * <p/>
-   * Some fire hoses like {@link PrefetchableTextFilesFirehoseFactory} may use a temporary
-   * directory to cache data in it.
-   *
-   * @param parser             an input row parser
-   * @param temporaryDirectory a directory where temporary files are stored
-   */
-  default Firehose connect(T parser, File temporaryDirectory) throws IOException, ParseException
-  {
-    return connect(parser);
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java b/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java
deleted file mode 100644
index 560ff52dd8b..00000000000
--- a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.data.input.impl.InputRowParser;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.parsers.ParseException;
-
-import java.io.IOException;
-/**
- * Initialization method that connects up the FirehoseV2.  If this method returns successfully it should be safe to
- * call start() on the returned FirehoseV2 (which might subsequently block).
- *
- * In contrast to V1 version, FirehoseFactoryV2 is able to pass an additional json-serialized object to FirehoseV2,
- * which contains last commit metadata
- *
- * <p/>
- * If this method returns null, then any attempt to call start(), advance(), currRow(), makeCommitter() and close() on the return
- * value will throw a surprising NPE.   Throwing IOException on connection failure or runtime exception on
- * invalid configuration is preferred over returning null.
- */
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-public interface FirehoseFactoryV2<T extends InputRowParser>
-{
-  FirehoseV2 connect(T parser, Object lastCommit) throws IOException, ParseException;
-}
diff --git a/api/src/main/java/io/druid/data/input/FirehoseV2.java b/api/src/main/java/io/druid/data/input/FirehoseV2.java
deleted file mode 100644
index 9d34d510d70..00000000000
--- a/api/src/main/java/io/druid/data/input/FirehoseV2.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.Closeable;
-
-/**
- * This is an interface that holds onto the stream of incoming data.  Realtime data ingestion is built around this
- * abstraction.  In order to add a new type of source for realtime data ingestion, all you need to do is implement
- * one of these and register it with the Main.
- *
- * In contrast to Firehose v1 version, FirehoseV2 will always operate in a "peek, then advance" manner.
- * And the intended usage patttern is
- * 1. Call start()
- * 2. Read currRow()
- * 3. Call advance()
- * 4. If index should be committed: commit()
- * 5. GOTO 2
- *
- * Note that commit() is being called *after* advance.
- *
- * This object acts a lot like an Iterator, but it doesn't extend the Iterator interface because it extends
- * Closeable and it is very important that the close() method doesn't get forgotten, which is easy to do if this
- * gets passed around as an Iterator.
- *
- * The implementation of this interface only needs to be minimally thread-safe. The methods ##start(), ##advance(),
- * ##currRow() and ##makeCommitter() are all called from the same thread.  ##makeCommitter(), however, returns a callback
- * which will be called on another thread, so the operations inside of that callback must be thread-safe.
- */
-@ExtensionPoint
-public interface FirehoseV2 extends Closeable
-{
-  /**
-   * For initial start
-   */
-  void start() throws Exception;
-
-  /**
-   * Advance the firehose to the next offset.  Implementations of this interface should make sure that
-   * if advance() is called and throws out an exception, the next call to currRow() should return a
-   * null value.
-   *
-   * @return true if and when there is another row available, false if the stream has dried up
-   */
-  boolean advance();
-
-  /**
-   * @return The current row
-   */
-  InputRow currRow();
-
-  /**
-   * Returns a Committer that will "commit" everything read up to the point at which makeCommitter() is called.
-   *
-   * This method is called when the main processing loop starts to persist its current batch of things to process.
-   * The returned committer will be run when the current batch has been successfully persisted
-   * and the metadata the committer carries can also be persisted along with segment data. There is usually
-   * some time lag between when this method is called and when the runnable is run.  The Runnable is also run on
-   * a separate thread so its operation should be thread-safe.
-   *
-   * Note that "correct" usage of this interface will always call advance() before commit() if the current row
-   * is considered in the commit.
-   *
-   * The Runnable is essentially just a lambda/closure that is run() after data supplied by this instance has
-   * been committed on the writer side of this interface protocol.
-   *
-   * A simple implementation of this interface might do nothing when run() is called,
-   * and save proper commit information in metadata
-   */
-  Committer makeCommitter();
-}
diff --git a/api/src/main/java/io/druid/data/input/InputRow.java b/api/src/main/java/io/druid/data/input/InputRow.java
deleted file mode 100644
index b5512c9a926..00000000000
--- a/api/src/main/java/io/druid/data/input/InputRow.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.util.List;
-
-/**
- * An InputRow is the interface definition of an event being input into the data ingestion layer.
- *
- * An InputRow is a Row with a self-describing list of the dimensions available.  This list is used to
- * implement "schema-less" data ingestion that allows the system to add new dimensions as they appear.
- *
- */
-@ExtensionPoint
-public interface InputRow extends Row
-{
-  /**
-   * Returns the dimensions that exist in this row.
-   *
-   * @return the dimensions that exist in this row.
-   */
-  List<String> getDimensions();
-}
diff --git a/api/src/main/java/io/druid/data/input/MapBasedInputRow.java b/api/src/main/java/io/druid/data/input/MapBasedInputRow.java
deleted file mode 100644
index d6f3647ed17..00000000000
--- a/api/src/main/java/io/druid/data/input/MapBasedInputRow.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.DateTimes;
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-@PublicApi
-public class MapBasedInputRow extends MapBasedRow implements InputRow
-{
-  private final List<String> dimensions;
-
-  public MapBasedInputRow(
-      long timestamp,
-      List<String> dimensions,
-      Map<String, Object> event
-  )
-  {
-    super(timestamp, event);
-    this.dimensions = dimensions;
-  }
-
-  public MapBasedInputRow(
-      DateTime timestamp,
-      List<String> dimensions,
-      Map<String, Object> event
-  )
-  {
-    super(timestamp, event);
-    this.dimensions = dimensions;
-  }
-
-  @Override
-  public List<String> getDimensions()
-  {
-    return dimensions;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "MapBasedInputRow{" +
-           "timestamp=" + DateTimes.utc(getTimestampFromEpoch()) +
-           ", event=" + getEvent() +
-           ", dimensions=" + dimensions +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/MapBasedRow.java b/api/src/main/java/io/druid/data/input/MapBasedRow.java
deleted file mode 100644
index 5a7c4db0373..00000000000
--- a/api/src/main/java/io/druid/data/input/MapBasedRow.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.DateTimes;
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-@PublicApi
-public class MapBasedRow implements Row
-{
-  private final DateTime timestamp;
-  private final Map<String, Object> event;
-
-  @JsonCreator
-  public MapBasedRow(
-      @JsonProperty("timestamp") DateTime timestamp,
-      @JsonProperty("event") Map<String, Object> event
-  )
-  {
-    this.timestamp = timestamp;
-    this.event = event;
-  }
-
-  public MapBasedRow(
-      long timestamp,
-      Map<String, Object> event
-  )
-  {
-    this(DateTimes.utc(timestamp), event);
-  }
-
-  @Override
-  public long getTimestampFromEpoch()
-  {
-    return timestamp.getMillis();
-  }
-
-  @Override
-  @JsonProperty
-  public DateTime getTimestamp()
-  {
-    return timestamp;
-  }
-
-  @JsonProperty
-  public Map<String, Object> getEvent()
-  {
-    return event;
-  }
-
-  @Override
-  public List<String> getDimension(String dimension)
-  {
-    return Rows.objectToStrings(event.get(dimension));
-  }
-
-  @Override
-  public Object getRaw(String dimension)
-  {
-    return event.get(dimension);
-  }
-
-  @Override
-  public Number getMetric(String metric)
-  {
-    return Rows.objectToNumber(metric, event.get(metric));
-  }
-
-  @Override
-  public String toString()
-  {
-    return "MapBasedRow{" +
-           "timestamp=" + timestamp +
-           ", event=" + event +
-           '}';
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    MapBasedRow that = (MapBasedRow) o;
-
-    if (!event.equals(that.event)) {
-      return false;
-    }
-    if (!timestamp.equals(that.timestamp)) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = timestamp.hashCode();
-    result = 31 * result + event.hashCode();
-    return result;
-  }
-
-  @Override
-  public int compareTo(Row o)
-  {
-    return timestamp.compareTo(o.getTimestamp());
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/Row.java b/api/src/main/java/io/druid/data/input/Row.java
deleted file mode 100644
index b8ed24c803d..00000000000
--- a/api/src/main/java/io/druid/data/input/Row.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.guice.annotations.PublicApi;
-import org.joda.time.DateTime;
-
-import java.util.List;
-
-/**
- * A Row of data.  This can be used for both input and output into various parts of the system.  It assumes
- * that the user already knows the schema of the row and can query for the parts that they care about.
- */
-@PublicApi
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "version", defaultImpl = MapBasedRow.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = "v1", value = MapBasedRow.class)
-})
-public interface Row extends Comparable<Row>
-{
-  /**
-   * Returns the timestamp from the epoch in milliseconds.  If the event happened _right now_, this would return the
-   * same thing as System.currentTimeMillis();
-   *
-   * @return the timestamp from the epoch in milliseconds.
-   */
-  long getTimestampFromEpoch();
-
-  /**
-   * Returns the timestamp from the epoch as an org.joda.time.DateTime.  If the event happened _right now_, this would return the
-   * same thing as new DateTime();
-   *
-   * @return the timestamp from the epoch as an org.joda.time.DateTime object.
-   */
-  DateTime getTimestamp();
-
-  /**
-   * Returns the list of dimension values for the given column name.
-   * <p/>
-   *
-   * @param dimension the column name of the dimension requested
-   *
-   * @return the list of values for the provided column name
-   */
-  List<String> getDimension(String dimension);
-
-  /**
-   * Returns the raw dimension value for the given column name. This is different from {@link #getDimension} which
-   * all values to strings before returning them.
-   *
-   * @param dimension the column name of the dimension requested
-   *
-   * @return the value of the provided column name
-   */
-  Object getRaw(String dimension);
-
-  /**
-   * Returns the metric column value for the given column name. This method is different from {@link #getRaw} in two
-   * aspects:
-   *  1. If the column is absent in the row, numeric zero is returned, rather than null.
-   *  2. If the column has string value, an attempt is made to parse this value as a number.
-   */
-  Number getMetric(String metric);
-}
diff --git a/api/src/main/java/io/druid/data/input/Rows.java b/api/src/main/java/io/druid/data/input/Rows.java
deleted file mode 100644
index 0ef09e9fa24..00000000000
--- a/api/src/main/java/io/druid/data/input/Rows.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSortedSet;
-import com.google.common.collect.Maps;
-import com.google.common.primitives.Longs;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.parsers.ParseException;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- */
-public class Rows
-{
-  public static final Long LONG_ZERO = 0L;
-
-  /**
-   * @param timeStamp rollup up timestamp to be used to create group key
-   * @param inputRow  input row
-   *
-   * @return groupKey for the given input row
-   */
-  public static List<Object> toGroupKey(long timeStamp, InputRow inputRow)
-  {
-    final Map<String, Set<String>> dims = Maps.newTreeMap();
-    for (final String dim : inputRow.getDimensions()) {
-      final Set<String> dimValues = ImmutableSortedSet.copyOf(inputRow.getDimension(dim));
-      if (dimValues.size() > 0) {
-        dims.put(dim, dimValues);
-      }
-    }
-    return ImmutableList.of(
-        timeStamp,
-        dims
-    );
-  }
-
-  /**
-   * Convert an object to a list of strings.
-   */
-  public static List<String> objectToStrings(final Object inputValue)
-  {
-    if (inputValue == null) {
-      return Collections.emptyList();
-    } else if (inputValue instanceof List) {
-      // guava's toString function fails on null objects, so please do not use it
-      final List<Object> values = (List) inputValue;
-
-      final List<String> retVal = new ArrayList<>(values.size());
-      for (Object val : values) {
-        retVal.add(String.valueOf(val));
-      }
-
-      return retVal;
-    } else {
-      return Collections.singletonList(String.valueOf(inputValue));
-    }
-  }
-
-  /**
-   * Convert an object to a number. Nulls are treated as zeroes.
-   *
-   * @param name       field name of the object being converted (may be used for exception messages)
-   * @param inputValue the actual object being converted
-   *
-   * @return a number
-   *
-   * @throws NullPointerException if the string is null
-   * @throws ParseException       if the column cannot be converted to a number
-   */
-  public static Number objectToNumber(final String name, final Object inputValue)
-  {
-    if (inputValue == null) {
-      return Rows.LONG_ZERO;
-    }
-
-    if (inputValue instanceof Number) {
-      return (Number) inputValue;
-    } else if (inputValue instanceof String) {
-      try {
-        String metricValueString = StringUtils.removeChar(((String) inputValue).trim(), ',');
-        // Longs.tryParse() doesn't support leading '+', so we need to trim it ourselves
-        metricValueString = trimLeadingPlusOfLongString(metricValueString);
-        Long v = Longs.tryParse(metricValueString);
-        // Do NOT use ternary operator here, because it makes Java to convert Long to Double
-        if (v != null) {
-          return v;
-        } else {
-          return Double.valueOf(metricValueString);
-        }
-      }
-      catch (Exception e) {
-        throw new ParseException(e, "Unable to parse value[%s] for field[%s]", inputValue, name);
-      }
-    } else {
-      throw new ParseException("Unknown type[%s] for field", inputValue.getClass(), inputValue);
-    }
-  }
-
-  private static String trimLeadingPlusOfLongString(String metricValueString)
-  {
-    if (metricValueString.length() > 1 && metricValueString.charAt(0) == '+') {
-      char secondChar = metricValueString.charAt(1);
-      if (secondChar >= '0' && secondChar <= '9') {
-        metricValueString = metricValueString.substring(1);
-      }
-    }
-    return metricValueString;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java b/api/src/main/java/io/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java
deleted file mode 100644
index aa95f2204af..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/AbstractTextFilesFirehoseFactory.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableList;
-import io.druid.data.input.Firehose;
-import io.druid.data.input.FirehoseFactory;
-import io.druid.java.util.common.logger.Logger;
-import org.apache.commons.io.Charsets;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.io.LineIterator;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-
-/**
- * This is an abstract class for firehose factory for making firehoses reading text files.
- * It provides an unified {@link #connect(StringInputRowParser, File)} implementation for its subclasses.
- *
- * @param <T> object type representing input data
- */
-public abstract class AbstractTextFilesFirehoseFactory<T>
-    implements FirehoseFactory<StringInputRowParser>
-{
-  private static final Logger LOG = new Logger(AbstractTextFilesFirehoseFactory.class);
-
-  private List<T> objects;
-
-  @Override
-  public Firehose connect(StringInputRowParser firehoseParser, File temporaryDirectory) throws IOException
-  {
-    if (objects == null) {
-      objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "initObjects"));
-    }
-    final Iterator<T> iterator = objects.iterator();
-    return new FileIteratingFirehose(
-        new Iterator<LineIterator>()
-        {
-          @Override
-          public boolean hasNext()
-          {
-            return iterator.hasNext();
-          }
-
-          @Override
-          public LineIterator next()
-          {
-            if (!hasNext()) {
-              throw new NoSuchElementException();
-            }
-            final T object = iterator.next();
-            try {
-              return IOUtils.lineIterator(wrapObjectStream(object, openObjectStream(object)), Charsets.UTF_8);
-            }
-            catch (Exception e) {
-              LOG.error(
-                  e,
-                  "Exception reading object[%s]",
-                  object
-              );
-              throw Throwables.propagate(e);
-            }
-          }
-        },
-        firehoseParser
-    );
-  }
-
-  /**
-   * Initialize objects to be read by this firehose.  Since firehose factories are constructed whenever
-   * io.druid.indexing.common.task.Task objects are deserialized, actual initialization of objects is deferred
-   * until {@link #connect(StringInputRowParser, File)} is called.
-   *
-   * @return a collection of initialized objects.
-   */
-  protected abstract Collection<T> initObjects() throws IOException;
-
-  /**
-   * Open an input stream from the given object.  If the object is compressed, this method should return a byte stream
-   * as it is compressed.  The object compression should be handled in {@link #wrapObjectStream(Object, InputStream)}.
-   *
-   * @param object an object to be read
-   *
-   * @return an input stream for the object
-   *
-   * @throws IOException
-   */
-  protected abstract InputStream openObjectStream(T object) throws IOException;
-
-  /**
-   * Wrap the given input stream if needed.  The decompression logic should be applied to the given stream if the object
-   * is compressed.
-   *
-   * @param object an input object
-   * @param stream a stream for the object
-   * @return
-   * @throws IOException
-   */
-  protected abstract InputStream wrapObjectStream(T object, InputStream stream) throws IOException;
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java b/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java
deleted file mode 100644
index 262b67fe1fe..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import io.druid.java.util.common.parsers.CSVParser;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-
-/**
- */
-public class CSVParseSpec extends ParseSpec
-{
-  private final String listDelimiter;
-  private final List<String> columns;
-  private final boolean hasHeaderRow;
-  private final int skipHeaderRows;
-
-  @JsonCreator
-  public CSVParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("listDelimiter") String listDelimiter,
-      @JsonProperty("columns") List<String> columns,
-      @JsonProperty("hasHeaderRow") boolean hasHeaderRow,
-      @JsonProperty("skipHeaderRows") int skipHeaderRows
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-
-    this.listDelimiter = listDelimiter;
-    this.columns = columns;
-    this.hasHeaderRow = hasHeaderRow;
-    this.skipHeaderRows = skipHeaderRows;
-
-    if (columns != null) {
-      for (String column : columns) {
-        Preconditions.checkArgument(!column.contains(","), "Column[%s] has a comma, it cannot", column);
-      }
-      verify(dimensionsSpec.getDimensionNames());
-    } else {
-      Preconditions.checkArgument(
-          hasHeaderRow,
-          "If columns field is not set, the first row of your data must have your header"
-          + " and hasHeaderRow must be set to true."
-      );
-    }
-  }
-
-  @Deprecated
-  public CSVParseSpec(
-      TimestampSpec timestampSpec,
-      DimensionsSpec dimensionsSpec,
-      String listDelimiter,
-      List<String> columns
-  )
-  {
-    this(timestampSpec, dimensionsSpec, listDelimiter, columns, false, 0);
-  }
-
-  @JsonProperty
-  public String getListDelimiter()
-  {
-    return listDelimiter;
-  }
-
-  @JsonProperty("columns")
-  public List<String> getColumns()
-  {
-    return columns;
-  }
-
-  @JsonProperty
-  public boolean isHasHeaderRow()
-  {
-    return hasHeaderRow;
-  }
-
-  @JsonProperty("skipHeaderRows")
-  public int getSkipHeaderRows()
-  {
-    return skipHeaderRows;
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-    for (String columnName : usedCols) {
-      Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
-    }
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new CSVParser(listDelimiter, columns, hasHeaderRow, skipHeaderRows);
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new CSVParseSpec(spec, getDimensionsSpec(), listDelimiter, columns, hasHeaderRow, skipHeaderRows);
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new CSVParseSpec(getTimestampSpec(), spec, listDelimiter, columns, hasHeaderRow, skipHeaderRows);
-  }
-
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java b/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java
deleted file mode 100644
index 8636b3445a4..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import io.druid.java.util.common.parsers.DelimitedParser;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-
-/**
- */
-public class DelimitedParseSpec extends ParseSpec
-{
-  private final String delimiter;
-  private final String listDelimiter;
-  private final List<String> columns;
-  private final boolean hasHeaderRow;
-  private final int skipHeaderRows;
-
-  @JsonCreator
-  public DelimitedParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("delimiter") String delimiter,
-      @JsonProperty("listDelimiter") String listDelimiter,
-      @JsonProperty("columns") List<String> columns,
-      @JsonProperty("hasHeaderRow") boolean hasHeaderRow,
-      @JsonProperty("skipHeaderRows") int skipHeaderRows
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-
-    this.delimiter = delimiter;
-    this.listDelimiter = listDelimiter;
-    this.columns = columns;
-    this.hasHeaderRow = hasHeaderRow;
-    this.skipHeaderRows = skipHeaderRows;
-
-    if (columns != null) {
-      for (String column : this.columns) {
-        Preconditions.checkArgument(!column.contains(","), "Column[%s] has a comma, it cannot", column);
-      }
-      verify(dimensionsSpec.getDimensionNames());
-    } else {
-      Preconditions.checkArgument(
-          hasHeaderRow,
-          "If columns field is not set, the first row of your data must have your header"
-          + " and hasHeaderRow must be set to true."
-      );
-    }
-  }
-
-  @Deprecated
-  public DelimitedParseSpec(
-      TimestampSpec timestampSpec,
-      DimensionsSpec dimensionsSpec,
-      String delimiter,
-      String listDelimiter,
-      List<String> columns
-  )
-  {
-    this(timestampSpec, dimensionsSpec, delimiter, listDelimiter, columns, false, 0);
-  }
-
-  @JsonProperty("delimiter")
-  public String getDelimiter()
-  {
-    return delimiter;
-  }
-
-  @JsonProperty("listDelimiter")
-  public String getListDelimiter()
-  {
-    return listDelimiter;
-  }
-
-  @JsonProperty("columns")
-  public List<String> getColumns()
-  {
-    return columns;
-  }
-
-  @JsonProperty
-  public boolean isHasHeaderRow()
-  {
-    return hasHeaderRow;
-  }
-
-  @JsonProperty("skipHeaderRows")
-  public int getSkipHeaderRows()
-  {
-    return skipHeaderRows;
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-    for (String columnName : usedCols) {
-      Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
-    }
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new DelimitedParser(
-        delimiter,
-        listDelimiter,
-        columns,
-        hasHeaderRow,
-        skipHeaderRows
-    );
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new DelimitedParseSpec(
-        spec,
-        getDimensionsSpec(),
-        delimiter,
-        listDelimiter,
-        columns,
-        hasHeaderRow,
-        skipHeaderRows
-    );
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new DelimitedParseSpec(
-        getTimestampSpec(),
-        spec,
-        delimiter,
-        listDelimiter,
-        columns,
-        hasHeaderRow,
-        skipHeaderRows
-    );
-  }
-
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java
deleted file mode 100644
index d721cbf0569..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.fasterxml.jackson.annotation.JsonValue;
-import com.google.common.base.Preconditions;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.StringUtils;
-
-import java.util.Objects;
-
-/**
- */
-@PublicApi
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringDimensionSchema.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = DimensionSchema.STRING_TYPE_NAME, value = StringDimensionSchema.class),
-    @JsonSubTypes.Type(name = DimensionSchema.LONG_TYPE_NAME, value = LongDimensionSchema.class),
-    @JsonSubTypes.Type(name = DimensionSchema.FLOAT_TYPE_NAME, value = FloatDimensionSchema.class),
-    @JsonSubTypes.Type(name = DimensionSchema.DOUBLE_TYPE_NAME, value = DoubleDimensionSchema.class),
-    @JsonSubTypes.Type(name = DimensionSchema.SPATIAL_TYPE_NAME, value = NewSpatialDimensionSchema.class),
-})
-public abstract class DimensionSchema
-{
-  public static final String STRING_TYPE_NAME = "string";
-  public static final String LONG_TYPE_NAME = "long";
-  public static final String FLOAT_TYPE_NAME = "float";
-  public static final String SPATIAL_TYPE_NAME = "spatial";
-  public static final String DOUBLE_TYPE_NAME = "double";
-
-
-  // main druid and druid-api should really use the same ValueType enum.
-  // merge them when druid-api is merged back into the main repo
-
-  /**
-   * Should be the same as {@code io.druid.segment.column.ValueType}.
-   * TODO merge them when druid-api is merged back into the main repo
-   */
-  public enum ValueType
-  {
-    FLOAT,
-    LONG,
-    STRING,
-    DOUBLE,
-    @SuppressWarnings("unused") // used in io.druid.segment.column.ValueType
-    COMPLEX;
-
-    @JsonValue
-    @Override
-    public String toString()
-    {
-      return StringUtils.toUpperCase(this.name());
-    }
-
-    @JsonCreator
-    public static ValueType fromString(String name)
-    {
-      return valueOf(StringUtils.toUpperCase(name));
-    }
-  }
-
-  public enum MultiValueHandling
-  {
-    SORTED_ARRAY,
-    SORTED_SET,
-    ARRAY {
-      @Override
-      public boolean needSorting()
-      {
-        return false;
-      }
-    };
-
-    public boolean needSorting()
-    {
-      return true;
-    }
-
-    @Override
-    @JsonValue
-    public String toString()
-    {
-      return StringUtils.toUpperCase(name());
-    }
-
-    @JsonCreator
-    public static MultiValueHandling fromString(String name)
-    {
-      return name == null ? ofDefault() : valueOf(StringUtils.toUpperCase(name));
-    }
-
-    // this can be system configuration
-    public static MultiValueHandling ofDefault()
-    {
-      return SORTED_ARRAY;
-    }
-  }
-
-  private final String name;
-  private final MultiValueHandling multiValueHandling;
-
-  protected DimensionSchema(String name, MultiValueHandling multiValueHandling)
-  {
-    this.name = Preconditions.checkNotNull(name, "Dimension name cannot be null.");
-    this.multiValueHandling = multiValueHandling == null ? MultiValueHandling.ofDefault() : multiValueHandling;
-  }
-
-  @JsonProperty
-  public String getName()
-  {
-    return name;
-  }
-
-  @JsonProperty
-  public MultiValueHandling getMultiValueHandling()
-  {
-    return multiValueHandling;
-  }
-
-  @JsonIgnore
-  public abstract String getTypeName();
-
-  @JsonIgnore
-  public abstract ValueType getValueType();
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    DimensionSchema that = (DimensionSchema) o;
-
-    if (!name.equals(that.name)) {
-      return false;
-    }
-
-    if (!getValueType().equals(that.getValueType())) {
-      return false;
-    }
-
-    return Objects.equals(multiValueHandling, that.multiValueHandling);
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return Objects.hash(name, getValueType(), multiValueHandling);
-  }
-
-  @Override
-  public String toString()
-  {
-    return "DimensionSchema{" +
-           "name='" + name + "'" +
-           ", valueType='" + getValueType() + "'" +
-           ", multiValueHandling='" + getMultiValueHandling() + "'" +
-           "}";
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java
deleted file mode 100644
index 70640c3aec1..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Function;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.parsers.ParserUtils;
-
-import javax.annotation.Nullable;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-@PublicApi
-public class DimensionsSpec
-{
-  private final List<DimensionSchema> dimensions;
-  private final Set<String> dimensionExclusions;
-  private final Map<String, DimensionSchema> dimensionSchemaMap;
-
-  public static final DimensionsSpec EMPTY = new DimensionsSpec(null, null, null);
-
-  public static List<DimensionSchema> getDefaultSchemas(List<String> dimNames)
-  {
-    return getDefaultSchemas(dimNames, DimensionSchema.MultiValueHandling.ofDefault());
-  }
-
-  public static List<DimensionSchema> getDefaultSchemas(
-      final List<String> dimNames,
-      final DimensionSchema.MultiValueHandling multiValueHandling
-  )
-  {
-    return Lists.transform(
-        dimNames,
-        new Function<String, DimensionSchema>()
-        {
-          @Override
-          public DimensionSchema apply(String input)
-          {
-            return new StringDimensionSchema(input, multiValueHandling);
-          }
-        }
-    );
-  }
-
-  public static DimensionSchema convertSpatialSchema(SpatialDimensionSchema spatialSchema)
-  {
-    return new NewSpatialDimensionSchema(spatialSchema.getDimName(), spatialSchema.getDims());
-  }
-
-  @JsonCreator
-  public DimensionsSpec(
-      @JsonProperty("dimensions") List<DimensionSchema> dimensions,
-      @JsonProperty("dimensionExclusions") List<String> dimensionExclusions,
-      @Deprecated @JsonProperty("spatialDimensions") List<SpatialDimensionSchema> spatialDimensions
-  )
-  {
-    this.dimensions = dimensions == null
-                      ? Lists.<DimensionSchema>newArrayList()
-                      : Lists.newArrayList(dimensions);
-
-    this.dimensionExclusions = (dimensionExclusions == null)
-                               ? Sets.<String>newHashSet()
-                               : Sets.newHashSet(dimensionExclusions);
-
-    List<SpatialDimensionSchema> spatialDims = (spatialDimensions == null)
-                                               ? Lists.<SpatialDimensionSchema>newArrayList()
-                                               : spatialDimensions;
-
-    verify(spatialDims);
-
-    // Map for easy dimension name-based schema lookup
-    this.dimensionSchemaMap = new HashMap<>();
-    for (DimensionSchema schema : this.dimensions) {
-      dimensionSchemaMap.put(schema.getName(), schema);
-    }
-
-    for (SpatialDimensionSchema spatialSchema : spatialDims) {
-      DimensionSchema newSchema = DimensionsSpec.convertSpatialSchema(spatialSchema);
-      this.dimensions.add(newSchema);
-      dimensionSchemaMap.put(newSchema.getName(), newSchema);
-    }
-  }
-
-
-  @JsonProperty
-  public List<DimensionSchema> getDimensions()
-  {
-    return dimensions;
-  }
-
-  @JsonProperty
-  public Set<String> getDimensionExclusions()
-  {
-    return dimensionExclusions;
-  }
-
-  @Deprecated
-  @JsonIgnore
-  public List<SpatialDimensionSchema> getSpatialDimensions()
-  {
-    Iterable<NewSpatialDimensionSchema> filteredList = Iterables.filter(
-        dimensions, NewSpatialDimensionSchema.class
-    );
-
-    Iterable<SpatialDimensionSchema> transformedList = Iterables.transform(
-        filteredList,
-        new Function<NewSpatialDimensionSchema, SpatialDimensionSchema>()
-        {
-          @Nullable
-          @Override
-          public SpatialDimensionSchema apply(NewSpatialDimensionSchema input)
-          {
-            return new SpatialDimensionSchema(input.getName(), input.getDims());
-          }
-        }
-    );
-
-    return Lists.newArrayList(transformedList);
-  }
-
-
-  @JsonIgnore
-  public List<String> getDimensionNames()
-  {
-    return Lists.transform(
-        dimensions,
-        new Function<DimensionSchema, String>()
-        {
-          @Override
-          public String apply(DimensionSchema input)
-          {
-            return input.getName();
-          }
-        }
-    );
-  }
-
-  @PublicApi
-  public DimensionSchema getSchema(String dimension)
-  {
-    return dimensionSchemaMap.get(dimension);
-  }
-
-  public boolean hasCustomDimensions()
-  {
-    return !(dimensions == null || dimensions.isEmpty());
-  }
-
-  @PublicApi
-  public DimensionsSpec withDimensions(List<DimensionSchema> dims)
-  {
-    return new DimensionsSpec(dims, ImmutableList.copyOf(dimensionExclusions), null);
-  }
-
-  public DimensionsSpec withDimensionExclusions(Set<String> dimExs)
-  {
-    return new DimensionsSpec(
-        dimensions,
-        ImmutableList.copyOf(Sets.union(dimensionExclusions, dimExs)),
-        null
-    );
-  }
-
-  @Deprecated
-  public DimensionsSpec withSpatialDimensions(List<SpatialDimensionSchema> spatials)
-  {
-    return new DimensionsSpec(dimensions, ImmutableList.copyOf(dimensionExclusions), spatials);
-  }
-
-  private void verify(List<SpatialDimensionSchema> spatialDimensions)
-  {
-    List<String> dimNames = getDimensionNames();
-    Preconditions.checkArgument(
-        Sets.intersection(this.dimensionExclusions, Sets.newHashSet(dimNames)).isEmpty(),
-        "dimensions and dimensions exclusions cannot overlap"
-    );
-
-    ParserUtils.validateFields(dimNames);
-    ParserUtils.validateFields(dimensionExclusions);
-
-    List<String> spatialDimNames = Lists.transform(
-        spatialDimensions,
-        new Function<SpatialDimensionSchema, String>()
-        {
-          @Override
-          public String apply(SpatialDimensionSchema input)
-          {
-            return input.getDimName();
-          }
-        }
-    );
-
-    // Don't allow duplicates between main list and deprecated spatial list
-    ParserUtils.validateFields(Iterables.concat(dimNames, spatialDimNames));
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    DimensionsSpec that = (DimensionsSpec) o;
-
-    if (!dimensions.equals(that.dimensions)) {
-      return false;
-    }
-
-    return dimensionExclusions.equals(that.dimensionExclusions);
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = dimensions.hashCode();
-    result = 31 * result + dimensionExclusions.hashCode();
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "DimensionsSpec{" +
-           "dimensions=" + dimensions +
-           ", dimensionExclusions=" + dimensionExclusions +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/DoubleDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/DoubleDimensionSchema.java
deleted file mode 100644
index bcd642fe40e..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/DoubleDimensionSchema.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class DoubleDimensionSchema extends DimensionSchema
-{
-  @JsonCreator
-  public DoubleDimensionSchema(@JsonProperty("name") String name)
-  {
-    super(name, null);
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.DOUBLE_TYPE_NAME;
-  }
-
-  @Override
-  public ValueType getValueType()
-  {
-    return ValueType.DOUBLE;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/FileIteratingFirehose.java b/api/src/main/java/io/druid/data/input/impl/FileIteratingFirehose.java
deleted file mode 100644
index eb60b366877..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/FileIteratingFirehose.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import io.druid.data.input.Firehose;
-import io.druid.data.input.InputRow;
-import io.druid.utils.Runnables;
-import org.apache.commons.io.LineIterator;
-
-import javax.annotation.Nullable;
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-/**
- */
-public class FileIteratingFirehose implements Firehose
-{
-  private final Iterator<LineIterator> lineIterators;
-  private final StringInputRowParser parser;
-
-  private LineIterator lineIterator = null;
-
-  private final Closeable closer;
-
-  public FileIteratingFirehose(
-      Iterator<LineIterator> lineIterators,
-      StringInputRowParser parser
-  )
-  {
-    this(lineIterators, parser, null);
-  }
-
-  public FileIteratingFirehose(
-      Iterator<LineIterator> lineIterators,
-      StringInputRowParser parser,
-      Closeable closer
-  )
-  {
-    this.lineIterators = lineIterators;
-    this.parser = parser;
-    this.closer = closer;
-  }
-
-  @Override
-  public boolean hasMore()
-  {
-    while ((lineIterator == null || !lineIterator.hasNext()) && lineIterators.hasNext()) {
-      lineIterator = getNextLineIterator();
-    }
-
-    return lineIterator != null && lineIterator.hasNext();
-  }
-
-  @Nullable
-  @Override
-  public InputRow nextRow()
-  {
-    if (!hasMore()) {
-      throw new NoSuchElementException();
-    }
-
-    return parser.parse(lineIterator.next());
-  }
-
-  private LineIterator getNextLineIterator()
-  {
-    if (lineIterator != null) {
-      lineIterator.close();
-    }
-
-    final LineIterator iterator = lineIterators.next();
-    parser.startFileFromBeginning();
-    return iterator;
-  }
-
-  @Override
-  public Runnable commit()
-  {
-    return Runnables.getNoopRunnable();
-  }
-
-  @Override
-  public void close() throws IOException
-  {
-    try {
-      if (lineIterator != null) {
-        lineIterator.close();
-      }
-    }
-    catch (Throwable t) {
-      try {
-        if (closer != null) {
-          closer.close();
-        }
-      }
-      catch (Exception e) {
-        t.addSuppressed(e);
-      }
-      throw t;
-    }
-    if (closer != null) {
-      closer.close();
-    }
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/FloatDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/FloatDimensionSchema.java
deleted file mode 100644
index a457a226ee4..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/FloatDimensionSchema.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class FloatDimensionSchema extends DimensionSchema
-{
-  @JsonCreator
-  public FloatDimensionSchema(
-      @JsonProperty("name") String name
-  )
-  {
-    super(name, null);
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.FLOAT_TYPE_NAME;
-  }
-
-  @Override
-  @JsonIgnore
-  public ValueType getValueType()
-  {
-    return ValueType.FLOAT;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/InputRowParser.java b/api/src/main/java/io/druid/data/input/impl/InputRowParser.java
deleted file mode 100644
index c3b8fba6e4f..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/InputRowParser.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.data.input.InputRow;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.collect.Utils;
-
-import javax.annotation.Nullable;
-import javax.validation.constraints.NotNull;
-import java.util.List;
-
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringInputRowParser.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = "string", value = StringInputRowParser.class),
-    @JsonSubTypes.Type(name = "map", value = MapInputRowParser.class),
-    @JsonSubTypes.Type(name = "noop", value = NoopInputRowParser.class)
-})
-public interface InputRowParser<T>
-{
-  /**
-   * Parse an input into list of {@link InputRow}. List can contains null for rows that should be thrown away,
-   * or throws {@code ParseException} if the input is unparseable. This method should never return null otherwise
-   * lots of things will break.
-   */
-  @NotNull
-  default List<InputRow> parseBatch(T input)
-  {
-    return Utils.nullableListOf(parse(input));
-  }
-
-  /**
-   * Parse an input into an {@link InputRow}. Return null if this input should be thrown away, or throws
-   * {@code ParseException} if the input is unparseable.
-   */
-  @Deprecated
-  @Nullable
-  default InputRow parse(T input)
-  {
-    return null;
-  }
-
-  ParseSpec getParseSpec();
-
-  InputRowParser withParseSpec(ParseSpec parseSpec);
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java
deleted file mode 100644
index 177a2b39a75..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import io.druid.java.util.common.parsers.JSONToLowerParser;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-
-/**
- * This class is only here for backwards compatibility
- */
-@Deprecated
-public class JSONLowercaseParseSpec extends ParseSpec
-{
-  private final ObjectMapper objectMapper;
-
-  @JsonCreator
-  public JSONLowercaseParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-    this.objectMapper = new ObjectMapper();
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new JSONToLowerParser(objectMapper, null, null);
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new JSONLowercaseParseSpec(spec, getDimensionsSpec());
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new JSONLowercaseParseSpec(getTimestampSpec(), spec);
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java
deleted file mode 100644
index ce33d41ed48..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonParser.Feature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import io.druid.java.util.common.parsers.JSONPathParser;
-import io.druid.java.util.common.parsers.JSONPathSpec;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- */
-public class JSONParseSpec extends ParseSpec
-{
-  private final ObjectMapper objectMapper;
-  private final JSONPathSpec flattenSpec;
-  private final Map<String, Boolean> featureSpec;
-
-  @JsonCreator
-  public JSONParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("flattenSpec") JSONPathSpec flattenSpec,
-      @JsonProperty("featureSpec") Map<String, Boolean> featureSpec
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-    this.objectMapper = new ObjectMapper();
-    this.flattenSpec = flattenSpec != null ? flattenSpec : JSONPathSpec.DEFAULT;
-    this.featureSpec = (featureSpec == null) ? new HashMap<String, Boolean>() : featureSpec;
-    for (Map.Entry<String, Boolean> entry : this.featureSpec.entrySet()) {
-      Feature feature = Feature.valueOf(entry.getKey());
-      objectMapper.configure(feature, entry.getValue());
-    }
-  }
-
-  @Deprecated
-  public JSONParseSpec(TimestampSpec ts, DimensionsSpec dims)
-  {
-    this(ts, dims, null, null);
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new JSONPathParser(flattenSpec, objectMapper);
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new JSONParseSpec(spec, getDimensionsSpec(), getFlattenSpec(), getFeatureSpec());
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new JSONParseSpec(getTimestampSpec(), spec, getFlattenSpec(), getFeatureSpec());
-  }
-
-  @JsonProperty
-  public JSONPathSpec getFlattenSpec()
-  {
-    return flattenSpec;
-  }
-
-  @JsonProperty
-  public Map<String, Boolean> getFeatureSpec()
-  {
-    return featureSpec;
-  }
-
-  @Override
-  public boolean equals(final Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-    if (!super.equals(o)) {
-      return false;
-    }
-    final JSONParseSpec that = (JSONParseSpec) o;
-    return Objects.equals(flattenSpec, that.flattenSpec) &&
-           Objects.equals(featureSpec, that.featureSpec);
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return Objects.hash(super.hashCode(), flattenSpec, featureSpec);
-  }
-
-  @Override
-  public String toString()
-  {
-    return "JSONParseSpec{" +
-           "timestampSpec=" + getTimestampSpec() +
-           ", dimensionsSpec=" + getDimensionsSpec() +
-           ", flattenSpec=" + flattenSpec +
-           ", featureSpec=" + featureSpec +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java
deleted file mode 100644
index 71fe381bed2..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JacksonInject;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import io.druid.java.util.common.parsers.JavaScriptParser;
-import io.druid.java.util.common.parsers.Parser;
-import io.druid.js.JavaScriptConfig;
-
-import java.util.List;
-
-/**
- */
-public class JavaScriptParseSpec extends ParseSpec
-{
-  private final String function;
-  private final JavaScriptConfig config;
-
-  // This variable is lazily initialized to avoid unnecessary JavaScript compilation during JSON serde
-  private JavaScriptParser parser;
-
-  @JsonCreator
-  public JavaScriptParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("function") String function,
-      @JacksonInject JavaScriptConfig config
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-
-    this.function = function;
-    this.config = config;
-  }
-
-  @JsonProperty("function")
-  public String getFunction()
-  {
-    return function;
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    // JavaScript configuration should be checked when it's actually used because someone might still want Druid
-    // nodes to be able to deserialize JavaScript-based objects even though JavaScript is disabled.
-    Preconditions.checkState(config.isEnabled(), "JavaScript is disabled");
-    parser = parser == null ? new JavaScriptParser(function) : parser;
-    return parser;
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new JavaScriptParseSpec(spec, getDimensionsSpec(), function, config);
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new JavaScriptParseSpec(getTimestampSpec(), spec, function, config);
-  }
-
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/LongDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/LongDimensionSchema.java
deleted file mode 100644
index 64af529360b..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/LongDimensionSchema.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class LongDimensionSchema extends DimensionSchema
-{
-  @JsonCreator
-  public LongDimensionSchema(
-      @JsonProperty("name") String name
-  )
-  {
-    super(name, null);
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.LONG_TYPE_NAME;
-  }
-
-  @Override
-  @JsonIgnore
-  public ValueType getValueType()
-  {
-    return ValueType.LONG;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java
deleted file mode 100644
index 3fa2305a700..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import io.druid.data.input.InputRow;
-import io.druid.data.input.MapBasedInputRow;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.parsers.ParseException;
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-
-public class MapInputRowParser implements InputRowParser<Map<String, Object>>
-{
-  private final ParseSpec parseSpec;
-
-  @JsonCreator
-  public MapInputRowParser(
-      @JsonProperty("parseSpec") ParseSpec parseSpec
-  )
-  {
-    this.parseSpec = parseSpec;
-  }
-
-  @Override
-  public List<InputRow> parseBatch(Map<String, Object> theMap)
-  {
-    final List<String> dimensions = parseSpec.getDimensionsSpec().hasCustomDimensions()
-                                    ? parseSpec.getDimensionsSpec().getDimensionNames()
-                                    : Lists.newArrayList(
-                                        Sets.difference(
-                                            theMap.keySet(),
-                                            parseSpec.getDimensionsSpec()
-                                                     .getDimensionExclusions()
-                                        )
-                                    );
-
-    final DateTime timestamp;
-    try {
-      timestamp = parseSpec.getTimestampSpec().extractTimestamp(theMap);
-      if (timestamp == null) {
-        final String input = theMap.toString();
-        throw new NullPointerException(
-            StringUtils.format(
-                "Null timestamp in input: %s",
-                input.length() < 100 ? input : input.substring(0, 100) + "..."
-            )
-        );
-      }
-    }
-    catch (Exception e) {
-      throw new ParseException(e, "Unparseable timestamp found!");
-    }
-
-    return ImmutableList.of(new MapBasedInputRow(timestamp.getMillis(), dimensions, theMap));
-  }
-
-  @JsonProperty
-  @Override
-  public ParseSpec getParseSpec()
-  {
-    return parseSpec;
-  }
-
-  @Override
-  public InputRowParser withParseSpec(ParseSpec parseSpec)
-  {
-    return new MapInputRowParser(parseSpec);
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/NewSpatialDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/NewSpatialDimensionSchema.java
deleted file mode 100644
index c5b823e39a9..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/NewSpatialDimensionSchema.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.List;
-
-/**
- * NOTE: 
- * This class should be deprecated after Druid supports configurable index types on dimensions.
- * When that exists, this should be the implementation: https://github.com/druid-io/druid/issues/2622
- * 
- * This is a stop-gap solution to consolidate the dimension specs and remove the separate spatial 
- * section in DimensionsSpec.
- */
-public class NewSpatialDimensionSchema extends DimensionSchema
-{
-  private final List<String> dims;
-
-  @JsonCreator
-  public NewSpatialDimensionSchema(
-      @JsonProperty("name") String name,
-      @JsonProperty("dims") List<String> dims
-  )
-  {
-    super(name, null);
-    this.dims = dims;
-  }
-
-  @JsonProperty
-  public List<String> getDims()
-  {
-    return dims;
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.SPATIAL_TYPE_NAME;
-  }
-
-  @Override
-  @JsonIgnore
-  public ValueType getValueType()
-  {
-    return ValueType.STRING;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    NewSpatialDimensionSchema that = (NewSpatialDimensionSchema) o;
-
-    return dims != null ? dims.equals(that.dims) : that.dims == null;
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return dims != null ? dims.hashCode() : 0;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/NoopInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/NoopInputRowParser.java
deleted file mode 100644
index 19459dfc4d4..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/NoopInputRowParser.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.ImmutableList;
-import io.druid.data.input.InputRow;
-
-import java.util.List;
-
-/**
- */
-public class NoopInputRowParser implements InputRowParser<InputRow>
-{
-  private final ParseSpec parseSpec;
-
-  @JsonCreator
-  public NoopInputRowParser(
-      @JsonProperty("parseSpec") ParseSpec parseSpec
-  )
-  {
-    this.parseSpec = parseSpec != null ? parseSpec : new TimeAndDimsParseSpec(null, null);
-  }
-
-  @Override
-  public List<InputRow> parseBatch(InputRow input)
-  {
-    return ImmutableList.of(input);
-  }
-
-  @JsonProperty
-  @Override
-  public ParseSpec getParseSpec()
-  {
-    return parseSpec;
-  }
-
-  @Override
-  public InputRowParser withParseSpec(ParseSpec parseSpec)
-  {
-    return new NoopInputRowParser(parseSpec);
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    NoopInputRowParser that = (NoopInputRowParser) o;
-
-    return parseSpec.equals(that.parseSpec);
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return parseSpec.hashCode();
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/ParseSpec.java b/api/src/main/java/io/druid/data/input/impl/ParseSpec.java
deleted file mode 100644
index 860bf30971b..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/ParseSpec.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "format", defaultImpl = DelimitedParseSpec.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = "json", value = JSONParseSpec.class),
-    @JsonSubTypes.Type(name = "csv", value = CSVParseSpec.class),
-    @JsonSubTypes.Type(name = "tsv", value = DelimitedParseSpec.class),
-    @JsonSubTypes.Type(name = "jsonLowercase", value = JSONLowercaseParseSpec.class),
-    @JsonSubTypes.Type(name = "timeAndDims", value = TimeAndDimsParseSpec.class),
-    @JsonSubTypes.Type(name = "regex", value = RegexParseSpec.class),
-    @JsonSubTypes.Type(name = "javascript", value = JavaScriptParseSpec.class)
-
-})
-public abstract class ParseSpec
-{
-  private final TimestampSpec timestampSpec;
-  private final DimensionsSpec dimensionsSpec;
-
-  protected ParseSpec(TimestampSpec timestampSpec, DimensionsSpec dimensionsSpec)
-  {
-    this.timestampSpec = timestampSpec;
-    this.dimensionsSpec = dimensionsSpec;
-  }
-
-  @JsonProperty
-  public TimestampSpec getTimestampSpec()
-  {
-    return timestampSpec;
-  }
-
-  @JsonProperty
-  public DimensionsSpec getDimensionsSpec()
-  {
-    return dimensionsSpec;
-  }
-
-  @PublicApi
-  public void verify(List<String> usedCols)
-  {
-    // do nothing
-  }
-
-  public Parser<String, Object> makeParser()
-  {
-    return null;
-  }
-
-  @PublicApi
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    ParseSpec parseSpec = (ParseSpec) o;
-
-    if (timestampSpec != null ? !timestampSpec.equals(parseSpec.timestampSpec) : parseSpec.timestampSpec != null) {
-      return false;
-    }
-    return !(dimensionsSpec != null
-             ? !dimensionsSpec.equals(parseSpec.dimensionsSpec)
-             : parseSpec.dimensionsSpec != null);
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = timestampSpec != null ? timestampSpec.hashCode() : 0;
-    result = 31 * result + (dimensionsSpec != null ? dimensionsSpec.hashCode() : 0);
-    return result;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java b/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java
deleted file mode 100644
index d23f2878197..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import io.druid.java.util.common.parsers.Parser;
-import io.druid.java.util.common.parsers.RegexParser;
-
-import java.util.List;
-
-/**
- */
-public class RegexParseSpec extends ParseSpec
-{
-  private final String listDelimiter;
-  private final List<String> columns;
-  private final String pattern;
-
-  @JsonCreator
-  public RegexParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec,
-      @JsonProperty("listDelimiter") String listDelimiter,
-      @JsonProperty("columns") List<String> columns,
-      @JsonProperty("pattern") String pattern
-  )
-  {
-    super(timestampSpec, dimensionsSpec);
-
-    this.listDelimiter = listDelimiter;
-    this.columns = columns;
-    this.pattern = pattern;
-
-    verify(dimensionsSpec.getDimensionNames());
-  }
-
-  @JsonProperty
-  public String getListDelimiter()
-  {
-    return listDelimiter;
-  }
-
-  @JsonProperty("pattern")
-  public String getPattern()
-  {
-    return pattern;
-  }
-
-  @JsonProperty
-  public List<String> getColumns()
-  {
-    return columns;
-  }
-
-  @Override
-  public void verify(List<String> usedCols)
-  {
-    if (columns != null) {
-      for (String columnName : usedCols) {
-        Preconditions.checkArgument(columns.contains(columnName), "column[%s] not in columns.", columnName);
-      }
-    }
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    if (columns == null) {
-      return new RegexParser(pattern, Optional.fromNullable(listDelimiter));
-    }
-    return new RegexParser(pattern, Optional.fromNullable(listDelimiter), columns);
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new RegexParseSpec(spec, getDimensionsSpec(), listDelimiter, columns, pattern);
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new RegexParseSpec(getTimestampSpec(), spec, listDelimiter, columns, pattern);
-  }
-
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java
deleted file mode 100644
index 60a9224707d..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.List;
-
-/**
- */
-@Deprecated
-public class SpatialDimensionSchema
-{
-  private final String dimName;
-  private final List<String> dims;
-
-  @JsonCreator
-  public SpatialDimensionSchema(
-      @JsonProperty("dimName") String dimName,
-      @JsonProperty("dims") List<String> dims
-  )
-  {
-    this.dimName = dimName;
-    this.dims = dims;
-  }
-
-  @JsonProperty
-  public String getDimName()
-  {
-    return dimName;
-  }
-
-  @JsonProperty
-  public List<String> getDims()
-  {
-    return dims;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    SpatialDimensionSchema that = (SpatialDimensionSchema) o;
-
-    if (dimName != null ? !dimName.equals(that.dimName) : that.dimName != null) {
-      return false;
-    }
-    return dims != null ? dims.equals(that.dims) : that.dims == null;
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = dimName != null ? dimName.hashCode() : 0;
-    result = 31 * result + (dims != null ? dims.hashCode() : 0);
-    return result;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/StringDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/StringDimensionSchema.java
deleted file mode 100644
index dd6ffd40163..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/StringDimensionSchema.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class StringDimensionSchema extends DimensionSchema
-{
-  @JsonCreator
-  public static StringDimensionSchema create(String name)
-  {
-    return new StringDimensionSchema(name);
-  }
-
-  @JsonCreator
-  public StringDimensionSchema(
-      @JsonProperty("name") String name,
-      @JsonProperty("multiValueHandling") MultiValueHandling multiValueHandling
-  )
-  {
-    super(name, multiValueHandling);
-  }
-
-  public StringDimensionSchema(String name)
-  {
-    this(name, null);
-  }
-
-  @Override
-  public String getTypeName()
-  {
-    return DimensionSchema.STRING_TYPE_NAME;
-  }
-
-  @Override
-  @JsonIgnore
-  public ValueType getValueType()
-  {
-    return ValueType.STRING;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java
deleted file mode 100644
index cefe4706e2f..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Charsets;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Iterators;
-import io.druid.data.input.ByteBufferInputRowParser;
-import io.druid.data.input.InputRow;
-import io.druid.java.util.common.collect.Utils;
-import io.druid.java.util.common.parsers.ParseException;
-import io.druid.java.util.common.parsers.Parser;
-
-import javax.annotation.Nullable;
-import java.nio.ByteBuffer;
-import java.nio.CharBuffer;
-import java.nio.charset.Charset;
-import java.nio.charset.CoderResult;
-import java.nio.charset.CodingErrorAction;
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-public class StringInputRowParser implements ByteBufferInputRowParser
-{
-  private static final Charset DEFAULT_CHARSET = Charsets.UTF_8;
-
-  private final ParseSpec parseSpec;
-  private final MapInputRowParser mapParser;
-  private final Charset charset;
-
-  private Parser<String, Object> parser;
-  private CharBuffer chars;
-
-  @JsonCreator
-  public StringInputRowParser(
-      @JsonProperty("parseSpec") ParseSpec parseSpec,
-      @JsonProperty("encoding") String encoding
-  )
-  {
-    this.parseSpec = Preconditions.checkNotNull(parseSpec, "parseSpec");
-    this.mapParser = new MapInputRowParser(parseSpec);
-
-    if (encoding != null) {
-      this.charset = Charset.forName(encoding);
-    } else {
-      this.charset = DEFAULT_CHARSET;
-    }
-  }
-
-  @Deprecated
-  public StringInputRowParser(ParseSpec parseSpec)
-  {
-    this(parseSpec, null);
-  }
-
-  @Override
-  public List<InputRow> parseBatch(ByteBuffer input)
-  {
-    return Utils.nullableListOf(parseMap(buildStringKeyMap(input)));
-  }
-
-  @JsonProperty
-  @Override
-  public ParseSpec getParseSpec()
-  {
-    return parseSpec;
-  }
-
-  @JsonProperty
-  public String getEncoding()
-  {
-    return charset.name();
-  }
-
-  @Override
-  public StringInputRowParser withParseSpec(ParseSpec parseSpec)
-  {
-    return new StringInputRowParser(parseSpec, getEncoding());
-  }
-
-  private Map<String, Object> buildStringKeyMap(ByteBuffer input)
-  {
-    int payloadSize = input.remaining();
-
-    if (chars == null || chars.remaining() < payloadSize) {
-      chars = CharBuffer.allocate(payloadSize);
-    }
-
-    final CoderResult coderResult = charset.newDecoder()
-                                           .onMalformedInput(CodingErrorAction.REPLACE)
-                                           .onUnmappableCharacter(CodingErrorAction.REPLACE)
-                                           .decode(input, chars, true);
-
-    Map<String, Object> theMap;
-    if (coderResult.isUnderflow()) {
-      chars.flip();
-      try {
-        theMap = parseString(chars.toString());
-      }
-      finally {
-        chars.clear();
-      }
-    } else {
-      throw new ParseException("Failed with CoderResult[%s]", coderResult);
-    }
-    return theMap;
-  }
-
-  public void initializeParser()
-  {
-    if (parser == null) {
-      // parser should be created when it is really used to avoid unnecessary initialization of the underlying
-      // parseSpec.
-      parser = parseSpec.makeParser();
-    }
-  }
-
-  public void startFileFromBeginning()
-  {
-    initializeParser();
-    parser.startFileFromBeginning();
-  }
-
-  @Nullable
-  public InputRow parse(@Nullable String input)
-  {
-    return parseMap(parseString(input));
-  }
-
-  @Nullable
-  private Map<String, Object> parseString(@Nullable String inputString)
-  {
-    initializeParser();
-    return parser.parseToMap(inputString);
-  }
-
-  @Nullable
-  private InputRow parseMap(@Nullable Map<String, Object> theMap)
-  {
-    // If a header is present in the data (and with proper configurations), a null is returned
-    if (theMap == null) {
-      return null;
-    }
-    return Iterators.getOnlyElement(mapParser.parseBatch(theMap).iterator());
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java b/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java
deleted file mode 100644
index 4b962b99227..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-public class TimeAndDimsParseSpec extends ParseSpec
-{
-  @JsonCreator
-  public TimeAndDimsParseSpec(
-      @JsonProperty("timestampSpec") TimestampSpec timestampSpec,
-      @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec
-  )
-  {
-    super(
-        timestampSpec != null ? timestampSpec : new TimestampSpec(null, null, null),
-        dimensionsSpec != null ? dimensionsSpec : new DimensionsSpec(null, null, null)
-    );
-  }
-
-  @Override
-  public Parser<String, Object> makeParser()
-  {
-    return new Parser<String, Object>()
-    {
-      @Override
-      public Map<String, Object> parseToMap(String input)
-      {
-        throw new UnsupportedOperationException("not supported");
-      }
-
-      @Override
-      public void setFieldNames(Iterable<String> fieldNames)
-      {
-        throw new UnsupportedOperationException("not supported");
-      }
-
-      @Override
-      public List<String> getFieldNames()
-      {
-        throw new UnsupportedOperationException("not supported");
-      }
-    };
-  }
-
-  @Override
-  public ParseSpec withTimestampSpec(TimestampSpec spec)
-  {
-    return new TimeAndDimsParseSpec(spec, getDimensionsSpec());
-  }
-
-  @Override
-  public ParseSpec withDimensionsSpec(DimensionsSpec spec)
-  {
-    return new TimeAndDimsParseSpec(getTimestampSpec(), spec);
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java b/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java
deleted file mode 100644
index 7fc274df21a..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Function;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.parsers.TimestampParser;
-import org.joda.time.DateTime;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- */
-@PublicApi
-public class TimestampSpec
-{
-  private static class ParseCtx
-  {
-    Object lastTimeObject = null;
-    DateTime lastDateTime = null;
-  }
-
-  private static final String DEFAULT_COLUMN = "timestamp";
-  private static final String DEFAULT_FORMAT = "auto";
-  private static final DateTime DEFAULT_MISSING_VALUE = null;
-
-  private final String timestampColumn;
-  private final String timestampFormat;
-  // this value should never be set for production data
-  private final DateTime missingValue;
-  /** This field is a derivative of {@link #timestampFormat}; not checked in {@link #equals} and {@link #hashCode} */
-  private final Function<Object, DateTime> timestampConverter;
-
-  // remember last value parsed
-  private static final ThreadLocal<ParseCtx> parseCtx = ThreadLocal.withInitial(ParseCtx::new);
-
-  @JsonCreator
-  public TimestampSpec(
-      @JsonProperty("column") String timestampColumn,
-      @JsonProperty("format") String format,
-      // this value should never be set for production data
-      @JsonProperty("missingValue") DateTime missingValue
-  )
-  {
-    this.timestampColumn = (timestampColumn == null) ? DEFAULT_COLUMN : timestampColumn;
-    this.timestampFormat = format == null ? DEFAULT_FORMAT : format;
-    this.timestampConverter = TimestampParser.createObjectTimestampParser(timestampFormat);
-    this.missingValue = missingValue == null
-                        ? DEFAULT_MISSING_VALUE
-                        : missingValue;
-  }
-
-  @JsonProperty("column")
-  public String getTimestampColumn()
-  {
-    return timestampColumn;
-  }
-
-  @JsonProperty("format")
-  public String getTimestampFormat()
-  {
-    return timestampFormat;
-  }
-
-  @JsonProperty("missingValue")
-  public DateTime getMissingValue()
-  {
-    return missingValue;
-  }
-
-  public DateTime extractTimestamp(Map<String, Object> input)
-  {
-    return parseDateTime(input.get(timestampColumn));
-  }
-
-  public DateTime parseDateTime(Object input)
-  {
-    DateTime extracted = missingValue;
-    if (input != null) {
-      ParseCtx ctx = parseCtx.get();
-      // Check if the input is equal to the last input, so we don't need to parse it again
-      if (input.equals(ctx.lastTimeObject)) {
-        extracted = ctx.lastDateTime;
-      } else {
-        extracted = timestampConverter.apply(input);
-        ParseCtx newCtx = new ParseCtx();
-        newCtx.lastTimeObject = input;
-        newCtx.lastDateTime = extracted;
-        parseCtx.set(newCtx);
-      }
-    }
-    return extracted;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    TimestampSpec that = (TimestampSpec) o;
-
-    if (!timestampColumn.equals(that.timestampColumn)) {
-      return false;
-    }
-    if (!timestampFormat.equals(that.timestampFormat)) {
-      return false;
-    }
-    return !(missingValue != null ? !missingValue.equals(that.missingValue) : that.missingValue != null);
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = timestampColumn.hashCode();
-    result = 31 * result + timestampFormat.hashCode();
-    result = 31 * result + (missingValue != null ? missingValue.hashCode() : 0);
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "TimestampSpec{" +
-           "timestampColumn='" + timestampColumn + '\'' +
-           ", timestampFormat='" + timestampFormat + '\'' +
-           ", missingValue=" + missingValue +
-           '}';
-  }
-
-  //simple merge strategy on timestampSpec that checks if all are equal or else
-  //returns null. this can be improved in future but is good enough for most use-cases.
-  public static TimestampSpec mergeTimestampSpec(List<TimestampSpec> toMerge)
-  {
-    if (toMerge == null || toMerge.size() == 0) {
-      return null;
-    }
-
-    TimestampSpec result = toMerge.get(0);
-    for (int i = 1; i < toMerge.size(); i++) {
-      if (toMerge.get(i) == null) {
-        continue;
-      }
-      if (!Objects.equals(result, toMerge.get(i))) {
-        return null;
-      }
-    }
-
-    return result;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/CacheManager.java b/api/src/main/java/io/druid/data/input/impl/prefetch/CacheManager.java
deleted file mode 100644
index 96a7a41692c..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/CacheManager.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.google.common.annotations.VisibleForTesting;
-import io.druid.java.util.common.ISE;
-import io.druid.java.util.common.logger.Logger;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A class managing cached files used by {@link PrefetchableTextFilesFirehoseFactory}.
- */
-class CacheManager<T>
-{
-  private static final Logger LOG = new Logger(CacheManager.class);
-
-  // A roughly max size of total cached objects which means the actual cached size can be bigger. The reason is our
-  // current client implementations for cloud storages like s3 don't support range scan yet, so we must download the
-  // whole file at once. It's still possible for the size of cached data to not exceed these variables by estimating the
-  // after-fetch size, but it makes us to consider the case when any files cannot be fetched due to their large size,
-  // which makes the implementation complicated.
-  private final long maxCacheCapacityBytes;
-
-  private final List<FetchedFile<T>> files = new ArrayList<>();
-
-  private long totalCachedBytes;
-
-  CacheManager(long maxCacheCapacityBytes)
-  {
-    this.maxCacheCapacityBytes = maxCacheCapacityBytes;
-  }
-
-  boolean isEnabled()
-  {
-    return maxCacheCapacityBytes > 0;
-  }
-
-  boolean cacheable()
-  {
-    // maxCacheCapacityBytes is a rough limit, so if totalCachedBytes is larger than it, no more caching is
-    // allowed.
-    return totalCachedBytes < maxCacheCapacityBytes;
-  }
-
-  FetchedFile<T> cache(FetchedFile<T> fetchedFile)
-  {
-    if (!cacheable()) {
-      throw new ISE(
-          "Cache space is full. totalCachedBytes[%d], maxCacheCapacityBytes[%d]",
-          totalCachedBytes,
-          maxCacheCapacityBytes
-      );
-    }
-
-    final FetchedFile<T> cachedFile = fetchedFile.cache();
-    files.add(cachedFile);
-    totalCachedBytes += cachedFile.length();
-
-    LOG.info("Object[%s] is cached. Current cached bytes is [%d]", cachedFile.getObject(), totalCachedBytes);
-    return cachedFile;
-  }
-
-  List<FetchedFile<T>> getFiles()
-  {
-    return files;
-  }
-
-  @VisibleForTesting
-  long getTotalCachedBytes()
-  {
-    return totalCachedBytes;
-  }
-
-  long getMaxCacheCapacityBytes()
-  {
-    return maxCacheCapacityBytes;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/FetchedFile.java b/api/src/main/java/io/druid/data/input/impl/prefetch/FetchedFile.java
deleted file mode 100644
index 8f3111edeb0..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/FetchedFile.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import java.io.Closeable;
-import java.io.File;
-
-/**
- * A class containing meta information about fetched objects.  This class used by {@link Fetcher}.
- */
-class FetchedFile<T>
-{
-  // Original object
-  private final T object;
-  // Fetched file stored in local disk
-  private final File file;
-  // Closer which is called when the file is not needed anymore. Usually this deletes the file except for cached files.
-  private final Closeable resourceCloser;
-
-  FetchedFile(T object, File file, Closeable resourceCloser)
-  {
-    this.object = object;
-    this.file = file;
-    this.resourceCloser = resourceCloser;
-  }
-
-  long length()
-  {
-    return file.length();
-  }
-
-  T getObject()
-  {
-    return object;
-  }
-
-  File getFile()
-  {
-    return file;
-  }
-
-  Closeable getResourceCloser()
-  {
-    return resourceCloser;
-  }
-
-  FetchedFile<T> cache()
-  {
-    return new FetchedFile<>(object, file, () -> {});
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/Fetcher.java b/api/src/main/java/io/druid/data/input/impl/prefetch/Fetcher.java
deleted file mode 100644
index d418bedaa17..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/Fetcher.java
+++ /dev/null
@@ -1,325 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.google.common.base.Throwables;
-import io.druid.java.util.common.ISE;
-import io.druid.java.util.common.logger.Logger;
-import org.apache.commons.io.IOUtils;
-
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicLong;
-
-/**
- * A file fetcher used by {@link PrefetchableTextFilesFirehoseFactory}.
- * See the javadoc of {@link PrefetchableTextFilesFirehoseFactory} for more details.
- */
-public class Fetcher<T> implements Iterator<OpenedObject<T>>
-{
-  private static final Logger LOG = new Logger(Fetcher.class);
-  private static final String FETCH_FILE_PREFIX = "fetch-";
-  private static final int BUFFER_SIZE = 1024 * 4;
-
-  private final CacheManager<T> cacheManager;
-  private final List<T> objects;
-  private final ExecutorService fetchExecutor;
-  private final File temporaryDirectory;
-
-  // A roughly max size of total fetched objects, but the actual fetched size can be bigger. The reason is our current
-  // client implementations for cloud storages like s3 don't support range scan yet, so we must download the whole file
-  // at once. It's still possible for the size of cached/fetched data to not exceed these variables by estimating the
-  // after-fetch size, but it makes us consider the case when any files cannot be fetched due to their large size, which
-  // makes the implementation complicated.
-  private final long maxFetchCapacityBytes;
-  private final boolean prefetchEnabled;
-
-  private final long prefetchTriggerBytes;
-
-  // timeout for fetching an object from the remote site
-  private final long fetchTimeout;
-
-  // maximum retry for fetching an object from the remote site
-  private final int maxFetchRetry;
-
-  private final LinkedBlockingQueue<FetchedFile<T>> fetchedFiles = new LinkedBlockingQueue<>();
-
-  // Number of bytes of current fetched files.
-  // This is updated when a file is successfully fetched, a fetched file is deleted, or a fetched file is
-  // cached.
-  private final AtomicLong fetchedBytes = new AtomicLong(0);
-
-  private final ObjectOpenFunction<T> openObjectFunction;
-  private final byte[] buffer;
-
-  private Future<Void> fetchFuture;
-
-  // nextFetchIndex indicates which object should be downloaded when fetch is triggered.
-  // This variable is always read by the same thread regardless of prefetch is enabled or not.
-  private int nextFetchIndex;
-
-  private int numRemainingObjects;
-
-  Fetcher(
-      CacheManager<T> cacheManager,
-      List<T> objects,
-      ExecutorService fetchExecutor,
-      File temporaryDirectory,
-      long maxFetchCapacityBytes,
-      long prefetchTriggerBytes,
-      long fetchTimeout,
-      int maxFetchRetry,
-      ObjectOpenFunction<T> openObjectFunction
-  )
-  {
-    this.cacheManager = cacheManager;
-    this.objects = objects;
-    this.fetchExecutor = fetchExecutor;
-    this.temporaryDirectory = temporaryDirectory;
-    this.maxFetchCapacityBytes = maxFetchCapacityBytes;
-    this.prefetchTriggerBytes = prefetchTriggerBytes;
-    this.fetchTimeout = fetchTimeout;
-    this.maxFetchRetry = maxFetchRetry;
-    this.openObjectFunction = openObjectFunction;
-    this.buffer = new byte[BUFFER_SIZE];
-
-    this.prefetchEnabled = maxFetchCapacityBytes > 0;
-    this.numRemainingObjects = objects.size();
-
-    // (*) If cache is initialized, put all cached files to the queue.
-    this.fetchedFiles.addAll(cacheManager.getFiles());
-    this.nextFetchIndex = fetchedFiles.size();
-
-    if (prefetchEnabled) {
-      fetchIfNeeded(0L);
-    }
-  }
-
-  /**
-   * Submit a fetch task if remainingBytes is smaller than {@link #prefetchTriggerBytes}.
-   */
-  private void fetchIfNeeded(long remainingBytes)
-  {
-    if ((fetchFuture == null || fetchFuture.isDone())
-        && remainingBytes <= prefetchTriggerBytes) {
-      fetchFuture = fetchExecutor.submit(() -> {
-        fetch();
-        return null;
-      });
-    }
-  }
-
-  /**
-   * Fetch objects to a local disk up to {@link PrefetchableTextFilesFirehoseFactory#maxFetchCapacityBytes}.
-   * This method is not thread safe and must be called by a single thread.  Note that even
-   * {@link PrefetchableTextFilesFirehoseFactory#maxFetchCapacityBytes} is 0, at least 1 file is always fetched.
-   * This is for simplifying design, and should be improved when our client implementations for cloud storages
-   * like S3 support range scan.
-   *
-   * This method is called by {@link #fetchExecutor} if prefetch is enabled.  Otherwise, it is called by the same
-   * thread.
-   */
-  private void fetch() throws Exception
-  {
-    for (; nextFetchIndex < objects.size() && fetchedBytes.get() <= maxFetchCapacityBytes; nextFetchIndex++) {
-      final T object = objects.get(nextFetchIndex);
-      LOG.info("Fetching [%d]th object[%s], fetchedBytes[%d]", nextFetchIndex, object, fetchedBytes.get());
-      final File outFile = File.createTempFile(FETCH_FILE_PREFIX, null, temporaryDirectory);
-      fetchedBytes.addAndGet(download(object, outFile, 0));
-      fetchedFiles.put(new FetchedFile<>(object, outFile, getFileCloser(outFile, fetchedBytes)));
-    }
-  }
-
-  /**
-   * Downloads an object. It retries downloading {@link PrefetchableTextFilesFirehoseFactory#maxFetchRetry}
-   * times and throws an exception.
-   *
-   * @param object   an object to be downloaded
-   * @param outFile  a file which the object data is stored
-   * @param tryCount current retry count
-   *
-   * @return number of downloaded bytes
-   */
-  private long download(T object, File outFile, int tryCount) throws IOException
-  {
-    try (final InputStream is = openObjectFunction.open(object);
-         final OutputStream os = new FileOutputStream(outFile)) {
-      return IOUtils.copyLarge(is, os, buffer);
-    }
-    catch (IOException e) {
-      final int nextTry = tryCount + 1;
-      if (!Thread.currentThread().isInterrupted() && nextTry < maxFetchRetry) {
-        LOG.error(e, "Failed to download object[%s], retrying (%d of %d)", object, nextTry, maxFetchRetry);
-        outFile.delete();
-        return download(object, outFile, nextTry);
-      } else {
-        LOG.error(e, "Failed to download object[%s], retries exhausted, aborting", object);
-        throw e;
-      }
-    }
-  }
-
-  @Override
-  public boolean hasNext()
-  {
-    return numRemainingObjects > 0;
-  }
-
-  @Override
-  public OpenedObject<T> next()
-  {
-    if (!hasNext()) {
-      throw new NoSuchElementException();
-    }
-
-    // If fetch() fails, hasNext() always returns true and next() is always called. The below method checks that
-    // fetch() threw an exception and propagates it if exists.
-    checkFetchException(false);
-
-    try {
-      final OpenedObject<T> openedObject = prefetchEnabled ? openObjectFromLocal() : openObjectFromRemote();
-      numRemainingObjects--;
-      return openedObject;
-    }
-    catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private void checkFetchException(boolean wait)
-  {
-    try {
-      if (wait) {
-        fetchFuture.get(fetchTimeout, TimeUnit.MILLISECONDS);
-        fetchFuture = null;
-      } else if (fetchFuture != null && fetchFuture.isDone()) {
-        fetchFuture.get();
-        fetchFuture = null;
-      }
-    }
-    catch (InterruptedException | ExecutionException e) {
-      throw new RuntimeException(e);
-    }
-    catch (TimeoutException e) {
-      throw new ISE(e, "Failed to fetch, but cannot check the reason in [%d] ms", fetchTimeout);
-    }
-  }
-
-  private OpenedObject<T> openObjectFromLocal() throws IOException
-  {
-    final FetchedFile<T> fetchedFile;
-
-    if (!fetchedFiles.isEmpty()) {
-      // If there are already fetched files, use them
-      fetchedFile = fetchedFiles.poll();
-    } else {
-      // Otherwise, wait for fetching
-      try {
-        fetchIfNeeded(fetchedBytes.get());
-        fetchedFile = fetchedFiles.poll(fetchTimeout, TimeUnit.MILLISECONDS);
-        if (fetchedFile == null) {
-          // Check the latest fetch is failed
-          checkFetchException(true);
-          // Or throw a timeout exception
-          throw new RuntimeException(new TimeoutException());
-        }
-      }
-      catch (InterruptedException e) {
-        throw Throwables.propagate(e);
-      }
-    }
-    final FetchedFile<T> maybeCached = cacheIfPossible(fetchedFile);
-    // trigger fetch again for subsequent next() calls
-    fetchIfNeeded(fetchedBytes.get());
-    return new OpenedObject<>(maybeCached);
-  }
-
-  private OpenedObject<T> openObjectFromRemote() throws IOException
-  {
-    if (fetchedFiles.size() > 0) {
-      // If fetchedFiles is not empty even though prefetching is disabled, they should be cached files.
-      // We use them first. See (*).
-      return new OpenedObject<>(fetchedFiles.poll());
-    } else if (cacheManager.cacheable()) {
-      // If cache is enabled, first download an object to local storage and cache it.
-      try {
-        // Since maxFetchCapacityBytes is 0, at most one file is fetched.
-        fetch();
-        FetchedFile<T> fetchedFile = fetchedFiles.poll();
-        if (fetchedFile == null) {
-          throw new ISE("Cannot fetch object[%s]", objects.get(nextFetchIndex - 1));
-        }
-        final FetchedFile<T> cached = cacheIfPossible(fetchedFile);
-        return new OpenedObject<>(cached);
-      }
-      catch (Exception e) {
-        throw Throwables.propagate(e);
-      }
-    } else {
-      final T object = objects.get(nextFetchIndex);
-      LOG.info("Reading [%d]th object[%s]", nextFetchIndex, object);
-      nextFetchIndex++;
-      return new OpenedObject<>(object, openObjectFunction.open(object), getNoopCloser());
-    }
-  }
-
-  private FetchedFile<T> cacheIfPossible(FetchedFile<T> fetchedFile)
-  {
-    if (cacheManager.cacheable()) {
-      final FetchedFile<T> cachedFile = cacheManager.cache(fetchedFile);
-      // If the fetchedFile is cached, make a room for fetching more data immediately.
-      // This is because cache space and fetch space are separated.
-      fetchedBytes.addAndGet(-fetchedFile.length());
-      return cachedFile;
-    } else {
-      return fetchedFile;
-    }
-  }
-
-  private static Closeable getNoopCloser()
-  {
-    return () -> {};
-  }
-
-  private static Closeable getFileCloser(
-      final File file,
-      final AtomicLong fetchedBytes
-  )
-  {
-    return () -> {
-      final long fileSize = file.length();
-      file.delete();
-      fetchedBytes.addAndGet(-fileSize);
-    };
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/ObjectOpenFunction.java b/api/src/main/java/io/druid/data/input/impl/prefetch/ObjectOpenFunction.java
deleted file mode 100644
index 52cfbce8afb..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/ObjectOpenFunction.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-interface ObjectOpenFunction<T>
-{
-  InputStream open(T object) throws IOException;
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/OpenedObject.java b/api/src/main/java/io/druid/data/input/impl/prefetch/OpenedObject.java
deleted file mode 100644
index 791a8db43a1..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/OpenedObject.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import org.apache.commons.io.FileUtils;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * A class containing meta information about an opened object.  This class is used to put related objects together.  It
- * contains an original object, an objectStream from the object, and a resourceCloser which knows how to release
- * associated resources on closing.
- *
- * {@link PrefetchableTextFilesFirehoseFactory.ResourceCloseableLineIterator} consumes the objectStream and closes
- * it with the resourceCloser.
- */
-class OpenedObject<T>
-{
-  // Original object
-  private final T object;
-  // Input stream from the object
-  private final InputStream objectStream;
-  // Closer which is called when the file is not needed anymore. Usually this deletes the file except for cached files.
-  private final Closeable resourceCloser;
-
-  OpenedObject(FetchedFile<T> fetchedFile) throws IOException
-  {
-    this(fetchedFile.getObject(), FileUtils.openInputStream(fetchedFile.getFile()), fetchedFile.getResourceCloser());
-  }
-
-  OpenedObject(T object, InputStream objectStream, Closeable resourceCloser)
-  {
-    this.object = object;
-    this.objectStream = objectStream;
-    this.resourceCloser = resourceCloser;
-  }
-
-  T getObject()
-  {
-    return object;
-  }
-
-  InputStream getObjectStream()
-  {
-    return objectStream;
-  }
-
-  Closeable getResourceCloser()
-  {
-    return resourceCloser;
-  }
-}
diff --git a/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java b/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java
deleted file mode 100644
index eb694cf85b7..00000000000
--- a/api/src/main/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactory.java
+++ /dev/null
@@ -1,268 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import io.druid.data.input.Firehose;
-import io.druid.data.input.impl.AbstractTextFilesFirehoseFactory;
-import io.druid.data.input.impl.FileIteratingFirehose;
-import io.druid.data.input.impl.StringInputRowParser;
-import io.druid.java.util.common.ISE;
-import io.druid.java.util.common.concurrent.Execs;
-import io.druid.java.util.common.logger.Logger;
-import org.apache.commons.io.LineIterator;
-
-import java.io.Closeable;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.charset.StandardCharsets;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeUnit;
-
-/**
- * PrefetchableTextFilesFirehoseFactory is an abstract firehose factory for reading text files.  The firehose returned
- * by this class provides three key functionalities.
- * <p/>
- *
- * - Caching: for the first call of {@link #connect(StringInputRowParser, File)}, it caches objects in a local disk
- * up to maxCacheCapacityBytes.  These caches are NOT deleted until the process terminates, and thus can be used for
- * future reads.
- * <br/>
- * - Fetching: when it reads all cached data, it fetches remaining objects into a local disk and reads data from
- * them.  For the performance reason, prefetch technique is used, that is, when the size of remaining fetched data is
- * smaller than {@link #prefetchTriggerBytes}, a background prefetch thread automatically starts to fetch remaining
- * objects.
- * <br/>
- * - Retry: if an exception occurs while downloading an object, it retries again up to {@link #maxFetchRetry}.
- * <p/>
- *
- * This implementation can be useful when the cost for reading input objects is large as reading from AWS S3 because
- * batch tasks like IndexTask or HadoopIndexTask can read the whole data twice for determining partition specs and
- * generating segments if the intervals of GranularitySpec is not specified.
- * <br/>
- * Prefetching can be turned on/off by setting maxFetchCapacityBytes.  Depending on prefetching is enabled or
- * disabled, the behavior of the firehose is different like below.
- * <p/>
- *
- * 1. If prefetch is enabled, this firehose can fetch input objects in background.
- * <br/>
- * 2. When next() is called, it first checks that there are already fetched files in local storage.
- * <br/>
- *   2.1 If exists, it simply chooses a fetched file and returns a {@link LineIterator} reading that file.
- *   <br/>
- *   2.2 If there is no fetched files in local storage but some objects are still remained to be read, the firehose
- *   fetches one of input objects in background immediately. If an IOException occurs while downloading the object,
- *   it retries up to the maximum retry count. Finally, the firehose returns a {@link LineIterator} only when the
- *   download operation is successfully finished.
- *   <br/>
- * 3. If prefetch is disabled, the firehose returns a {@link LineIterator} which directly reads the stream opened by
- * {@link #openObjectStream}. If there is an IOException, it will throw it and the read will fail.
- */
-public abstract class PrefetchableTextFilesFirehoseFactory<T>
-    extends AbstractTextFilesFirehoseFactory<T>
-{
-  private static final Logger LOG = new Logger(PrefetchableTextFilesFirehoseFactory.class);
-  private static final long DEFAULT_MAX_CACHE_CAPACITY_BYTES = 1024 * 1024 * 1024; // 1GB
-  private static final long DEFAULT_MAX_FETCH_CAPACITY_BYTES = 1024 * 1024 * 1024; // 1GB
-  private static final long DEFAULT_FETCH_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(60);
-  private static final int DEFAULT_MAX_FETCH_RETRY = 3;
-
-  private final CacheManager<T> cacheManager;
-  private final long maxFetchCapacityBytes;
-  private final long prefetchTriggerBytes;
-  private final long fetchTimeout;
-  private final int maxFetchRetry;
-
-  private List<T> objects;
-
-  public PrefetchableTextFilesFirehoseFactory(
-      Long maxCacheCapacityBytes,
-      Long maxFetchCapacityBytes,
-      Long prefetchTriggerBytes,
-      Long fetchTimeout,
-      Integer maxFetchRetry
-  )
-  {
-    this.cacheManager = new CacheManager<>(
-        maxCacheCapacityBytes == null ? DEFAULT_MAX_CACHE_CAPACITY_BYTES : maxCacheCapacityBytes
-    );
-    this.maxFetchCapacityBytes = maxFetchCapacityBytes == null
-                                 ? DEFAULT_MAX_FETCH_CAPACITY_BYTES
-                                 : maxFetchCapacityBytes;
-    this.prefetchTriggerBytes = prefetchTriggerBytes == null
-                                ? this.maxFetchCapacityBytes / 2
-                                : prefetchTriggerBytes;
-    this.fetchTimeout = fetchTimeout == null ? DEFAULT_FETCH_TIMEOUT_MS : fetchTimeout;
-    this.maxFetchRetry = maxFetchRetry == null ? DEFAULT_MAX_FETCH_RETRY : maxFetchRetry;
-  }
-
-  @JsonProperty
-  public long getMaxCacheCapacityBytes()
-  {
-    return cacheManager.getMaxCacheCapacityBytes();
-  }
-
-  @JsonProperty
-  public long getMaxFetchCapacityBytes()
-  {
-    return maxFetchCapacityBytes;
-  }
-
-  @JsonProperty
-  public long getPrefetchTriggerBytes()
-  {
-    return prefetchTriggerBytes;
-  }
-
-  @JsonProperty
-  public long getFetchTimeout()
-  {
-    return fetchTimeout;
-  }
-
-  @JsonProperty
-  public int getMaxFetchRetry()
-  {
-    return maxFetchRetry;
-  }
-
-  @VisibleForTesting
-  CacheManager<T> getCacheManager()
-  {
-    return cacheManager;
-  }
-
-  @Override
-  public Firehose connect(StringInputRowParser firehoseParser, File temporaryDirectory) throws IOException
-  {
-    if (!cacheManager.isEnabled() && maxFetchCapacityBytes == 0) {
-      return super.connect(firehoseParser, temporaryDirectory);
-    }
-
-    if (objects == null) {
-      objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));
-    }
-
-    Preconditions.checkState(temporaryDirectory.exists(), "temporaryDirectory[%s] does not exist", temporaryDirectory);
-    Preconditions.checkState(
-        temporaryDirectory.isDirectory(),
-        "temporaryDirectory[%s] is not a directory",
-        temporaryDirectory
-    );
-
-    LOG.info("Create a new firehose for [%d] objects", objects.size());
-
-    // fetchExecutor is responsible for background data fetching
-    final ExecutorService fetchExecutor = Execs.singleThreaded("firehose_fetch_%d");
-    final Fetcher<T> fetcher = new Fetcher<>(
-        cacheManager,
-        objects,
-        fetchExecutor,
-        temporaryDirectory,
-        maxFetchCapacityBytes,
-        prefetchTriggerBytes,
-        fetchTimeout,
-        maxFetchRetry,
-        this::openObjectStream
-    );
-
-    return new FileIteratingFirehose(
-        new Iterator<LineIterator>()
-        {
-          @Override
-          public boolean hasNext()
-          {
-            return fetcher.hasNext();
-          }
-
-          @Override
-          public LineIterator next()
-          {
-            if (!hasNext()) {
-              throw new NoSuchElementException();
-            }
-
-            final OpenedObject<T> openedObject = fetcher.next();
-            final InputStream stream;
-            try {
-              stream = wrapObjectStream(
-                  openedObject.getObject(),
-                  openedObject.getObjectStream()
-              );
-            }
-            catch (IOException e) {
-              throw new RuntimeException(e);
-            }
-
-            return new ResourceCloseableLineIterator(
-                new InputStreamReader(stream, StandardCharsets.UTF_8),
-                openedObject.getResourceCloser()
-            );
-          }
-        },
-        firehoseParser,
-        () -> {
-          fetchExecutor.shutdownNow();
-          try {
-            Preconditions.checkState(fetchExecutor.awaitTermination(fetchTimeout, TimeUnit.MILLISECONDS));
-          }
-          catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-            throw new ISE("Failed to shutdown fetch executor during close");
-          }
-        }
-    );
-  }
-
-  /**
-   * This class calls the {@link Closeable#close()} method of the resourceCloser when it is closed.
-   */
-  static class ResourceCloseableLineIterator extends LineIterator
-  {
-    private final Closeable resourceCloser;
-
-    ResourceCloseableLineIterator(Reader reader, Closeable resourceCloser) throws IllegalArgumentException
-    {
-      super(reader);
-      this.resourceCloser = resourceCloser;
-    }
-
-    @Override
-    public void close()
-    {
-      super.close();
-      try {
-        resourceCloser.close();
-      }
-      catch (IOException e) {
-        throw new RuntimeException(e);
-      }
-    }
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/Binders.java b/api/src/main/java/io/druid/guice/Binders.java
deleted file mode 100644
index 30315d6fdeb..00000000000
--- a/api/src/main/java/io/druid/guice/Binders.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Binder;
-import com.google.inject.Key;
-import com.google.inject.multibindings.MapBinder;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.segment.loading.DataSegmentArchiver;
-import io.druid.segment.loading.DataSegmentFinder;
-import io.druid.segment.loading.DataSegmentKiller;
-import io.druid.segment.loading.DataSegmentMover;
-import io.druid.segment.loading.DataSegmentPuller;
-import io.druid.segment.loading.DataSegmentPusher;
-import io.druid.tasklogs.TaskLogs;
-
-/**
- */
-@PublicApi
-public class Binders
-{
-  public static MapBinder<String, DataSegmentPuller> dataSegmentPullerBinder(Binder binder)
-  {
-    return MapBinder.newMapBinder(binder, String.class, DataSegmentPuller.class);
-  }
-
-  public static MapBinder<String, DataSegmentKiller> dataSegmentKillerBinder(Binder binder)
-  {
-    return MapBinder.newMapBinder(binder, String.class, DataSegmentKiller.class);
-  }
-
-  public static MapBinder<String, DataSegmentMover> dataSegmentMoverBinder(Binder binder)
-  {
-    return MapBinder.newMapBinder(binder, String.class, DataSegmentMover.class);
-  }
-
-  public static MapBinder<String, DataSegmentArchiver> dataSegmentArchiverBinder(Binder binder)
-  {
-    return MapBinder.newMapBinder(binder, String.class, DataSegmentArchiver.class);
-  }
-
-  public static MapBinder<String, DataSegmentPusher> dataSegmentPusherBinder(Binder binder)
-  {
-    return PolyBind.optionBinder(binder, Key.get(DataSegmentPusher.class));
-  }
-
-  public static MapBinder<String, DataSegmentFinder> dataSegmentFinderBinder(Binder binder)
-  {
-    return PolyBind.optionBinder(binder, Key.get(DataSegmentFinder.class));
-  }
-
-  public static MapBinder<String, TaskLogs> taskLogsBinder(Binder binder)
-  {
-    return PolyBind.optionBinder(binder, Key.get(TaskLogs.class));
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/ConditionalMultibind.java b/api/src/main/java/io/druid/guice/ConditionalMultibind.java
deleted file mode 100644
index bd01f97086c..00000000000
--- a/api/src/main/java/io/druid/guice/ConditionalMultibind.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Predicate;
-import com.google.inject.Binder;
-import com.google.inject.TypeLiteral;
-import com.google.inject.multibindings.Multibinder;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.Annotation;
-import java.util.Properties;
-
-/**
- * Provides the ability to conditionally bind an item to a set. The condition is based on the value set in the
- * runtime.properties.
- *
- * Usage example:
- *
- * ConditionalMultibind.create(props, binder, Animal.class)
- *                     .addConditionBinding("animal.type", Predicates.equalTo("cat"), Cat.class)
- *                     .addConditionBinding("animal.type", Predicates.equalTo("dog"), Dog.class);
- *
- * At binding time, this will check the value set for property "animal.type" in props. If the value is "cat", it will
- * add a binding to Cat.class. If the value is "dog", it will add a binding to Dog.class.
- *
- * At injection time, you will get the items that satisfy their corresponding predicates by calling
- * injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>(){}))
- */
-@PublicApi
-public class ConditionalMultibind<T>
-{
-
-  /**
-   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
-   *
-   * @param properties the runtime properties.
-   * @param binder     the binder for the injector that is being configured.
-   * @param type       the type that will be injected.
-   * @param <T>        interface type.
-   *
-   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
-   */
-  public static <T> ConditionalMultibind<T> create(Properties properties, Binder binder, Class<T> type)
-  {
-    return new ConditionalMultibind<T>(properties, Multibinder.<T>newSetBinder(binder, type));
-  }
-
-  /**
-   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
-   *
-   * @param properties     the runtime properties.
-   * @param binder         the binder for the injector that is being configured.
-   * @param type           the type that will be injected.
-   * @param <T>            interface type.
-   * @param annotationType the binding annotation.
-   *
-   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
-   */
-  public static <T> ConditionalMultibind<T> create(
-      Properties properties,
-      Binder binder,
-      Class<T> type,
-      Class<? extends Annotation> annotationType
-  )
-  {
-    return new ConditionalMultibind<T>(properties, Multibinder.<T>newSetBinder(binder, type, annotationType));
-  }
-
-  /**
-   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
-   *
-   * @param properties the runtime properties.
-   * @param binder     the binder for the injector that is being configured.
-   * @param type       the type that will be injected.
-   * @param <T>        interface type.
-   *
-   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
-   */
-  public static <T> ConditionalMultibind<T> create(Properties properties, Binder binder, TypeLiteral<T> type)
-  {
-    return new ConditionalMultibind<T>(properties, Multibinder.<T>newSetBinder(binder, type));
-  }
-
-  /**
-   * Create a ConditionalMultibind that resolves items to be added to the set at "binding" time.
-   *
-   * @param properties     the runtime properties.
-   * @param binder         the binder for the injector that is being configured.
-   * @param type           the type that will be injected.
-   * @param <T>            interface type.
-   * @param annotationType the binding annotation.
-   *
-   * @return An instance of ConditionalMultibind that can be used to add conditional bindings.
-   */
-  public static <T> ConditionalMultibind<T> create(
-      Properties properties,
-      Binder binder,
-      TypeLiteral<T> type,
-      Class<? extends Annotation> annotationType
-  )
-  {
-    return new ConditionalMultibind<T>(properties, Multibinder.<T>newSetBinder(binder, type, annotationType));
-  }
-
-
-  private final Properties properties;
-  private final Multibinder<T> multibinder;
-
-  public ConditionalMultibind(Properties properties, Multibinder<T> multibinder)
-  {
-    this.properties = properties;
-    this.multibinder = multibinder;
-  }
-
-  /**
-   * Unconditionally bind target to the set.
-   *
-   * @param target the target class to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addBinding(Class<? extends T> target)
-  {
-    multibinder.addBinding().to(target);
-    return this;
-  }
-
-  /**
-   * Unconditionally bind target to the set.
-   *
-   * @param target the target instance to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addBinding(T target)
-  {
-    multibinder.addBinding().toInstance(target);
-    return this;
-  }
-
-  /**
-   * Unconditionally bind target to the set.
-   *
-   * @param target the target type to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addBinding(TypeLiteral<T> target)
-  {
-    multibinder.addBinding().to(target);
-    return this;
-  }
-
-  /**
-   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
-   *
-   * @param property  the property to inspect on
-   * @param condition the predicate used to verify whether to add a binding to "target"
-   * @param target    the target class to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addConditionBinding(
-      String property,
-      Predicate<String> condition,
-      Class<? extends T> target
-  )
-  {
-    final String value = properties.getProperty(property);
-    if (value == null) {
-      return this;
-    }
-    if (condition.apply(value)) {
-      multibinder.addBinding().to(target);
-    }
-    return this;
-  }
-
-  /**
-   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
-   *
-   * @param property  the property to inspect on
-   * @param condition the predicate used to verify whether to add a binding to "target"
-   * @param target    the target instance to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  public ConditionalMultibind<T> addConditionBinding(
-      String property,
-      Predicate<String> condition,
-      T target
-  )
-  {
-    final String value = properties.getProperty(property);
-    if (value == null) {
-      return this;
-    }
-    if (condition.apply(value)) {
-      multibinder.addBinding().toInstance(target);
-    }
-    return this;
-  }
-
-  /**
-   * Conditionally bind target to the set. If "condition" returns true, add a binding to "target".
-   *
-   * @param property  the property to inspect on
-   * @param condition the predicate used to verify whether to add a binding to "target"
-   * @param target    the target type to which it adds a binding.
-   *
-   * @return self to support a continuous syntax for adding more conditional bindings.
-   */
-  @PublicApi
-  public ConditionalMultibind<T> addConditionBinding(
-      String property,
-      Predicate<String> condition,
-      TypeLiteral<T> target
-  )
-  {
-    final String value = properties.getProperty(property);
-    if (value == null) {
-      return this;
-    }
-    if (condition.apply(value)) {
-      multibinder.addBinding().to(target);
-    }
-    return this;
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java b/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java
deleted file mode 100644
index 956abc7abcd..00000000000
--- a/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Binder;
-import com.google.inject.Module;
-import io.druid.guice.annotations.PublicApi;
-
-/**
- */
-@PublicApi
-public class DruidGuiceExtensions implements Module
-{
-  @Override
-  public void configure(Binder binder)
-  {
-    binder.bindScope(LazySingleton.class, DruidScopes.SINGLETON);
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/DruidScopes.java b/api/src/main/java/io/druid/guice/DruidScopes.java
deleted file mode 100644
index d7aeab313c2..00000000000
--- a/api/src/main/java/io/druid/guice/DruidScopes.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Key;
-import com.google.inject.Provider;
-import com.google.inject.Scope;
-import com.google.inject.Scopes;
-import io.druid.guice.annotations.PublicApi;
-
-/**
- */
-@PublicApi
-public class DruidScopes
-{
-  public static final Scope SINGLETON = new Scope()
-  {
-    @Override
-    public <T> Provider<T> scope(Key<T> key, Provider<T> unscoped)
-    {
-      return Scopes.SINGLETON.scope(key, unscoped);
-    }
-
-    @Override
-    public String toString()
-    {
-      return "DruidScopes.SINGLETON";
-    }
-  };
-}
diff --git a/api/src/main/java/io/druid/guice/Jerseys.java b/api/src/main/java/io/druid/guice/Jerseys.java
deleted file mode 100644
index 51520eae749..00000000000
--- a/api/src/main/java/io/druid/guice/Jerseys.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Binder;
-import com.google.inject.TypeLiteral;
-import com.google.inject.multibindings.Multibinder;
-import io.druid.guice.annotations.JSR311Resource;
-import io.druid.guice.annotations.PublicApi;
-
-/**
- */
-@PublicApi
-public class Jerseys
-{
-  public static void addResource(Binder binder, Class<?> resourceClazz)
-  {
-    Multibinder.newSetBinder(binder, new TypeLiteral<Class<?>>(){}, JSR311Resource.class)
-               .addBinding()
-               .toInstance(resourceClazz);
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/JsonConfigProvider.java b/api/src/main/java/io/druid/guice/JsonConfigProvider.java
deleted file mode 100644
index c3a9cfd64d8..00000000000
--- a/api/src/main/java/io/druid/guice/JsonConfigProvider.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Supplier;
-import com.google.common.base.Suppliers;
-import com.google.inject.Binder;
-import com.google.inject.Inject;
-import com.google.inject.Key;
-import com.google.inject.Provider;
-import com.google.inject.util.Types;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.Annotation;
-import java.lang.reflect.ParameterizedType;
-import java.util.Properties;
-
-
-/**
- * Provides a singleton value of type {@code <T>} from {@code Properties} bound in guice.
- * <br/>
- * <h3>Usage</h3>
- * To install this provider, bind it in your guice module, like below.
- *
- * <pre>
- * JsonConfigProvider.bind(binder, "druid.server", DruidServerConfig.class);
- * </pre>
- * <br/>
- * In the above case, {@code druid.server} should be a key found in the {@code Properties} bound elsewhere.
- * The value of that key should directly relate to the fields in {@code DruidServerConfig.class}.
- *
- * <h3>Implementation</h3>
- * <br/>
- * The state of {@code <T>} is defined by the value of the property {@code propertyBase}.
- * This value is a json structure, decoded via {@link JsonConfigurator#configurate(Properties, String, Class)}.
- * <br/>
- *
- * An example might be if DruidServerConfig.class were
- *
- * <pre>
- *   public class DruidServerConfig
- *   {
- *     @JsonProperty @NotNull public String hostname = null;
- *     @JsonProperty @Min(1025) public int port = 8080;
- *   }
- * </pre>
- *
- * And your Properties object had in it
- *
- * <pre>
- *   druid.server.hostname=0.0.0.0
- *   druid.server.port=3333
- * </pre>
- *
- * Then this would bind a singleton instance of a DruidServerConfig object with hostname = "0.0.0.0" and port = 3333.
- *
- * If the port weren't set in the properties, then the default of 8080 would be taken.  Essentially, it is the same as
- * subtracting the "druid.server" prefix from the properties and building a Map which is then passed into
- * ObjectMapper.convertValue()
- *
- * @param <T> type of config object to provide.
- */
-@PublicApi
-public class JsonConfigProvider<T> implements Provider<Supplier<T>>
-{
-  @SuppressWarnings("unchecked")
-  public static <T> void bind(Binder binder, String propertyBase, Class<T> classToProvide)
-  {
-    bind(
-        binder,
-        propertyBase,
-        classToProvide,
-        Key.get(classToProvide),
-        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide))
-    );
-  }
-
-  @SuppressWarnings("unchecked")
-  public static <T> void bind(Binder binder, String propertyBase, Class<T> classToProvide, Annotation annotation)
-  {
-    bind(
-        binder,
-        propertyBase,
-        classToProvide,
-        Key.get(classToProvide, annotation),
-        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide), annotation)
-    );
-  }
-
-  @SuppressWarnings("unchecked")
-  public static <T> void bind(
-      Binder binder,
-      String propertyBase,
-      Class<T> classToProvide,
-      Class<? extends Annotation> annotation
-  )
-  {
-    bind(
-        binder,
-        propertyBase,
-        classToProvide,
-        Key.get(classToProvide, annotation),
-        (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide), annotation)
-    );
-  }
-
-  @SuppressWarnings("unchecked")
-  public static <T> void bind(
-      Binder binder,
-      String propertyBase,
-      Class<T> clazz,
-      Key<T> instanceKey,
-      Key<Supplier<T>> supplierKey
-  )
-  {
-    binder.bind(supplierKey).toProvider((Provider) of(propertyBase, clazz)).in(LazySingleton.class);
-    binder.bind(instanceKey).toProvider(new SupplierProvider<T>(supplierKey));
-  }
-
-  @SuppressWarnings("unchecked")
-  public static <T> void bindInstance(
-      Binder binder,
-      Key<T> bindKey,
-      T instance
-  )
-  {
-    binder.bind(bindKey).toInstance(instance);
-
-    final ParameterizedType supType = Types.newParameterizedType(Supplier.class, bindKey.getTypeLiteral().getType());
-    final Key supplierKey;
-
-    if (bindKey.getAnnotationType() != null) {
-      supplierKey = Key.get(supType, bindKey.getAnnotationType());
-    } else if (bindKey.getAnnotation() != null) {
-      supplierKey = Key.get(supType, bindKey.getAnnotation());
-    } else {
-      supplierKey = Key.get(supType);
-    }
-
-    binder.bind(supplierKey).toInstance(Suppliers.<T>ofInstance(instance));
-  }
-
-  public static <T> JsonConfigProvider<T> of(String propertyBase, Class<T> classToProvide)
-  {
-    return new JsonConfigProvider<T>(propertyBase, classToProvide);
-  }
-
-  private final String propertyBase;
-  private final Class<T> classToProvide;
-
-  private Properties props;
-  private JsonConfigurator configurator;
-
-  private Supplier<T> retVal = null;
-
-  public JsonConfigProvider(
-      String propertyBase,
-      Class<T> classToProvide
-  )
-  {
-    this.propertyBase = propertyBase;
-    this.classToProvide = classToProvide;
-  }
-
-  @Inject
-  public void inject(
-      Properties props,
-      JsonConfigurator configurator
-  )
-  {
-    this.props = props;
-    this.configurator = configurator;
-  }
-
-  @Override
-  public Supplier<T> get()
-  {
-    if (retVal != null) {
-      return retVal;
-    }
-
-    try {
-      final T config = configurator.configurate(props, propertyBase, classToProvide);
-      retVal = Suppliers.ofInstance(config);
-    }
-    catch (RuntimeException e) {
-      // When a runtime exception gets thrown out, this provider will get called again if the object is asked for again.
-      // This will have the same failed result, 'cause when it's called no parameters will have actually changed.
-      // Guice will then report the same error multiple times, which is pretty annoying. Cache a null supplier and
-      // return that instead.  This is technically enforcing a singleton, but such is life.
-      retVal = Suppliers.ofInstance(null);
-      throw e;
-    }
-    return retVal;
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/JsonConfigurator.java b/api/src/main/java/io/druid/guice/JsonConfigurator.java
deleted file mode 100644
index f6d766dd955..00000000000
--- a/api/src/main/java/io/druid/guice/JsonConfigurator.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.fasterxml.jackson.annotation.JacksonInject;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.introspect.AnnotatedField;
-import com.fasterxml.jackson.databind.introspect.BeanPropertyDefinition;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Function;
-import com.google.common.base.Strings;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.inject.Inject;
-import com.google.inject.ProvisionException;
-import com.google.inject.spi.Message;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.logger.Logger;
-
-import javax.validation.ConstraintViolation;
-import javax.validation.ElementKind;
-import javax.validation.Path;
-import javax.validation.Validator;
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-/**
- */
-public class JsonConfigurator
-{
-  private static final Logger log = new Logger(JsonConfigurator.class);
-
-  private final ObjectMapper jsonMapper;
-  private final Validator validator;
-
-  @Inject
-  public JsonConfigurator(
-      ObjectMapper jsonMapper,
-      Validator validator
-  )
-  {
-    this.jsonMapper = jsonMapper;
-    this.validator = validator;
-  }
-
-  public <T> T configurate(Properties props, String propertyPrefix, Class<T> clazz) throws ProvisionException
-  {
-    verifyClazzIsConfigurable(jsonMapper, clazz);
-
-    // Make it end with a period so we only include properties with sub-object thingies.
-    final String propertyBase = propertyPrefix.endsWith(".") ? propertyPrefix : propertyPrefix + ".";
-
-    Map<String, Object> jsonMap = Maps.newHashMap();
-    for (String prop : props.stringPropertyNames()) {
-      if (prop.startsWith(propertyBase)) {
-        final String propValue = props.getProperty(prop);
-        Object value;
-        try {
-          // If it's a String Jackson wants it to be quoted, so check if it's not an object or array and quote.
-          String modifiedPropValue = propValue;
-          if (!(modifiedPropValue.startsWith("[") || modifiedPropValue.startsWith("{"))) {
-            modifiedPropValue = jsonMapper.writeValueAsString(propValue);
-          }
-          value = jsonMapper.readValue(modifiedPropValue, Object.class);
-        }
-        catch (IOException e) {
-          log.info(e, "Unable to parse [%s]=[%s] as a json object, using as is.", prop, propValue);
-          value = propValue;
-        }
-
-        hieraricalPutValue(propertyPrefix, prop, prop.substring(propertyBase.length()), value, jsonMap);
-      }
-    }
-
-    final T config;
-    try {
-      config = jsonMapper.convertValue(jsonMap, clazz);
-    }
-    catch (IllegalArgumentException e) {
-      throw new ProvisionException(
-          StringUtils.format("Problem parsing object at prefix[%s]: %s.", propertyPrefix, e.getMessage()), e
-      );
-    }
-
-    final Set<ConstraintViolation<T>> violations = validator.validate(config);
-    if (!violations.isEmpty()) {
-      List<String> messages = Lists.newArrayList();
-
-      for (ConstraintViolation<T> violation : violations) {
-        StringBuilder path = new StringBuilder();
-        try {
-          Class<?> beanClazz = violation.getRootBeanClass();
-          final Iterator<Path.Node> iter = violation.getPropertyPath().iterator();
-          while (iter.hasNext()) {
-            Path.Node next = iter.next();
-            if (next.getKind() == ElementKind.PROPERTY) {
-              final String fieldName = next.getName();
-              final Field theField = beanClazz.getDeclaredField(fieldName);
-
-              if (theField.getAnnotation(JacksonInject.class) != null) {
-                path = new StringBuilder(StringUtils.format(" -- Injected field[%s] not bound!?", fieldName));
-                break;
-              }
-
-              JsonProperty annotation = theField.getAnnotation(JsonProperty.class);
-              final boolean noAnnotationValue = annotation == null || Strings.isNullOrEmpty(annotation.value());
-              final String pathPart = noAnnotationValue ? fieldName : annotation.value();
-              if (path.length() == 0) {
-                path.append(pathPart);
-              } else {
-                path.append(".").append(pathPart);
-              }
-            }
-          }
-        }
-        catch (NoSuchFieldException e) {
-          throw Throwables.propagate(e);
-        }
-
-        messages.add(StringUtils.format("%s - %s", path.toString(), violation.getMessage()));
-      }
-
-      throw new ProvisionException(
-          Iterables.transform(
-              messages,
-              new Function<String, Message>()
-              {
-                @Override
-                public Message apply(String input)
-                {
-                  return new Message(StringUtils.format("%s%s", propertyBase, input));
-                }
-              }
-          )
-      );
-    }
-
-    log.info("Loaded class[%s] from props[%s] as [%s]", clazz, propertyBase, config);
-
-    return config;
-  }
-
-  private static void hieraricalPutValue(
-      String propertyPrefix,
-      String originalProperty,
-      String property,
-      Object value,
-      Map<String, Object> targetMap
-  )
-  {
-    int dotIndex = property.indexOf('.');
-    if (dotIndex < 0) {
-      targetMap.put(property, value);
-      return;
-    }
-    if (dotIndex == 0) {
-      throw new ProvisionException(StringUtils.format("Double dot in property: %s", originalProperty));
-    }
-    if (dotIndex == property.length() - 1) {
-      throw new ProvisionException(StringUtils.format("Dot at the end of property: %s", originalProperty));
-    }
-    String nestedKey = property.substring(0, dotIndex);
-    Object nested = targetMap.computeIfAbsent(nestedKey, k -> new HashMap<String, Object>());
-    if (!(nested instanceof Map)) {
-      // Clash is possible between properties, which are used to configure different objects: e. g.
-      // druid.emitter=parametrized is used to configure Emitter class, and druid.emitter.parametrized.xxx=yyy is used
-      // to configure ParametrizedUriEmitterConfig object. So skipping xxx=yyy key-value pair when configuring Emitter
-      // doesn't make any difference. That is why we just log this situation, instead of throwing an exception.
-      log.info(
-          "Skipping %s property: one of it's prefixes is also used as a property key. Prefix: %s",
-          originalProperty,
-          propertyPrefix
-      );
-      return;
-    }
-    Map<String, Object> nestedMap = (Map<String, Object>) nested;
-    hieraricalPutValue(propertyPrefix, originalProperty, property.substring(dotIndex + 1), value, nestedMap);
-  }
-
-  @VisibleForTesting
-  public static <T> void verifyClazzIsConfigurable(ObjectMapper mapper, Class<T> clazz)
-  {
-    final List<BeanPropertyDefinition> beanDefs = mapper.getSerializationConfig()
-                                                        .introspect(mapper.constructType(clazz))
-                                                        .findProperties();
-    for (BeanPropertyDefinition beanDef : beanDefs) {
-      final AnnotatedField field = beanDef.getField();
-      if (field == null || !field.hasAnnotation(JsonProperty.class)) {
-        throw new ProvisionException(
-            StringUtils.format(
-                "JsonConfigurator requires Jackson-annotated Config objects to have field annotations. %s doesn't",
-                clazz
-            )
-        );
-      }
-    }
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/KeyHolder.java b/api/src/main/java/io/druid/guice/KeyHolder.java
deleted file mode 100644
index 24533fcdd5a..00000000000
--- a/api/src/main/java/io/druid/guice/KeyHolder.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Key;
-
-/**
- */
-public class KeyHolder<T>
-{
-  private final Key<? extends T> key;
-
-  public KeyHolder(
-      Key<? extends T> key
-  )
-  {
-    this.key = key;
-  }
-
-  public Key<? extends T> getKey()
-  {
-    return key;
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/LazySingleton.java b/api/src/main/java/io/druid/guice/LazySingleton.java
deleted file mode 100644
index 5acf6466be6..00000000000
--- a/api/src/main/java/io/druid/guice/LazySingleton.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.ScopeAnnotation;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@Target({ElementType.TYPE, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@ScopeAnnotation
-@PublicApi
-public @interface LazySingleton
-{
-}
diff --git a/api/src/main/java/io/druid/guice/LifecycleModule.java b/api/src/main/java/io/druid/guice/LifecycleModule.java
deleted file mode 100644
index 9d294fb3b7c..00000000000
--- a/api/src/main/java/io/druid/guice/LifecycleModule.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.Binder;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.Module;
-import com.google.inject.Provides;
-import com.google.inject.TypeLiteral;
-import com.google.inject.multibindings.Multibinder;
-import com.google.inject.name.Names;
-import io.druid.java.util.common.lifecycle.Lifecycle;
-
-import java.lang.annotation.Annotation;
-import java.util.Set;
-
-/**
- * A Module to add lifecycle management to the injector.  {@link DruidGuiceExtensions} must also be included.
- */
-public class LifecycleModule implements Module
-{
-  private final LifecycleScope scope = new LifecycleScope(Lifecycle.Stage.NORMAL);
-  private final LifecycleScope lastScope = new LifecycleScope(Lifecycle.Stage.LAST);
-
-  /**
-   * Registers a class to instantiate eagerly.  Classes mentioned here will be pulled out of
-   * the injector with an injector.getInstance() call when the lifecycle is created.
-   *
-   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
-   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
-   * side-effect even if nothing actually directly depends on the instance.
-   *
-   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point from the injector, not to
-   * auto-register things with the {@link Lifecycle}.  It is also possible to just bind things eagerly with Guice,
-   * it is not clear which is actually the best approach.  This is more explicit, but eager bindings inside of modules
-   * is less error-prone.
-   *
-   * @param clazz, the class to instantiate
-   * @return this, for chaining.
-   */
-  public static void register(Binder binder, Class<?> clazz)
-  {
-    registerKey(binder, Key.get(clazz));
-  }
-
-  /**
-   * Registers a class/annotation combination to instantiate eagerly.  Classes mentioned here will be pulled out of
-   * the injector with an injector.getInstance() call when the lifecycle is created.
-   *
-   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
-   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
-   * side-effect even if nothing actually directly depends on the instance.
-   *
-   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point from the injector, not to
-   * auto-register things with the {@link Lifecycle}.  It is also possible to just bind things eagerly with Guice,
-   * it is not clear which is actually the best approach.  This is more explicit, but eager bindings inside of modules
-   * is less error-prone.
-   *
-   * @param clazz, the class to instantiate
-   * @param annotation The annotation class to register with Guice
-   * @return this, for chaining
-   */
-  public static void register(Binder binder, Class<?> clazz, Class<? extends Annotation> annotation)
-  {
-    registerKey(binder, Key.get(clazz, annotation));
-  }
-
-  /**
-   * Registers a key to instantiate eagerly.  {@link Key}s mentioned here will be pulled out of
-   * the injector with an injector.getInstance() call when the lifecycle is created.
-   *
-   * Eagerly loaded classes will *not* be automatically added to the Lifecycle unless they are bound to the proper
-   * scope.  That is, they are generally eagerly loaded because the loading operation will produce some beneficial
-   * side-effect even if nothing actually directly depends on the instance.
-   *
-   * This mechanism exists to allow the {@link Lifecycle} to be the primary entry point
-   * from the injector, not to auto-register things with the {@link Lifecycle}.  It is
-   * also possible to just bind things eagerly with Guice, it is not clear which is actually the best approach.
-   * This is more explicit, but eager bindings inside of modules is less error-prone.
-   *
-   * @param key The key to use in finding the DruidNode instance
-   */
-  public static void registerKey(Binder binder, Key<?> key)
-  {
-    getEagerBinder(binder).addBinding().toInstance(new KeyHolder<Object>(key));
-  }
-
-  private static Multibinder<KeyHolder> getEagerBinder(Binder binder)
-  {
-    return Multibinder.newSetBinder(binder, KeyHolder.class, Names.named("lifecycle"));
-  }
-
-  @Override
-  public void configure(Binder binder)
-  {
-    getEagerBinder(binder); // Load up the eager binder so that it will inject the empty set at a minimum.
-
-    binder.bindScope(ManageLifecycle.class, scope);
-    binder.bindScope(ManageLifecycleLast.class, lastScope);
-  }
-
-  @Provides @LazySingleton
-  public Lifecycle getLifecycle(final Injector injector)
-  {
-    final Key<Set<KeyHolder>> keyHolderKey = Key.get(new TypeLiteral<Set<KeyHolder>>(){}, Names.named("lifecycle"));
-    final Set<KeyHolder> eagerClasses = injector.getInstance(keyHolderKey);
-
-    Lifecycle lifecycle = new Lifecycle()
-    {
-      @Override
-      public void start() throws Exception
-      {
-        for (KeyHolder<?> holder : eagerClasses) {
-          injector.getInstance(holder.getKey()); // Pull the key so as to "eagerly" load up the class.
-        }
-        super.start();
-      }
-    };
-    scope.setLifecycle(lifecycle);
-    lastScope.setLifecycle(lifecycle);
-
-    return lifecycle;
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/LifecycleScope.java b/api/src/main/java/io/druid/guice/LifecycleScope.java
deleted file mode 100644
index 19914b0f0f3..00000000000
--- a/api/src/main/java/io/druid/guice/LifecycleScope.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.collect.Lists;
-import com.google.inject.Key;
-import com.google.inject.Provider;
-import com.google.inject.Scope;
-import io.druid.java.util.common.lifecycle.Lifecycle;
-import io.druid.java.util.common.logger.Logger;
-
-import java.util.List;
-
-/**
- * A scope that adds objects to the Lifecycle.  This is by definition also a lazy singleton scope.
- */
-public class LifecycleScope implements Scope
-{
-  private static final Logger log = new Logger(LifecycleScope.class);
-  private final Lifecycle.Stage stage;
-
-  private Lifecycle lifecycle;
-  private final List<Object> instances = Lists.newLinkedList();
-
-  public LifecycleScope(Lifecycle.Stage stage)
-  {
-    this.stage = stage;
-  }
-
-  public void setLifecycle(Lifecycle lifecycle)
-  {
-    synchronized (instances) {
-      this.lifecycle = lifecycle;
-      for (Object instance : instances) {
-        lifecycle.addManagedInstance(instance, stage);
-      }
-    }
-  }
-
-  @Override
-  public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped)
-  {
-    return new Provider<T>()
-    {
-      private volatile T value = null;
-
-      @Override
-      public synchronized T get()
-      {
-        if (value == null) {
-          final T retVal = unscoped.get();
-
-          synchronized (instances) {
-            if (lifecycle == null) {
-              instances.add(retVal);
-            } else {
-              try {
-                lifecycle.addMaybeStartManagedInstance(retVal, stage);
-              }
-              catch (Exception e) {
-                log.warn(e, "Caught exception when trying to create a[%s]", key);
-                return null;
-              }
-            }
-          }
-
-          value = retVal;
-        }
-
-        return value;
-      }
-    };
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/ManageLifecycle.java b/api/src/main/java/io/druid/guice/ManageLifecycle.java
deleted file mode 100644
index 53d4d8f33e9..00000000000
--- a/api/src/main/java/io/druid/guice/ManageLifecycle.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.ScopeAnnotation;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * Marks the object to be managed by {@link io.druid.java.util.common.lifecycle.Lifecycle}
- *
- * This Scope gets defined by {@link LifecycleModule}
- */
-@Target({ ElementType.TYPE, ElementType.METHOD })
-@Retention(RetentionPolicy.RUNTIME)
-@ScopeAnnotation
-@PublicApi
-public @interface ManageLifecycle
-{
-}
diff --git a/api/src/main/java/io/druid/guice/ManageLifecycleLast.java b/api/src/main/java/io/druid/guice/ManageLifecycleLast.java
deleted file mode 100644
index 02a7ff15d98..00000000000
--- a/api/src/main/java/io/druid/guice/ManageLifecycleLast.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.inject.ScopeAnnotation;
-import io.druid.guice.annotations.PublicApi;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * Marks the object to be managed by {@link io.druid.java.util.common.lifecycle.Lifecycle} and set to be on Stage.LAST
- *
- * This Scope gets defined by {@link LifecycleModule}
- */
-@Target({ ElementType.TYPE, ElementType.METHOD })
-@Retention(RetentionPolicy.RUNTIME)
-@ScopeAnnotation
-@PublicApi
-public @interface ManageLifecycleLast
-{
-}
diff --git a/api/src/main/java/io/druid/guice/PolyBind.java b/api/src/main/java/io/druid/guice/PolyBind.java
deleted file mode 100644
index fbcdaaaa4ad..00000000000
--- a/api/src/main/java/io/druid/guice/PolyBind.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Preconditions;
-import com.google.inject.Binder;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.Provider;
-import com.google.inject.ProvisionException;
-import com.google.inject.TypeLiteral;
-import com.google.inject.binder.ScopedBindingBuilder;
-import com.google.inject.multibindings.MapBinder;
-import com.google.inject.util.Types;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.StringUtils;
-
-import javax.annotation.Nullable;
-import java.lang.reflect.ParameterizedType;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * Provides the ability to create "polymorphic" bindings.  Where the polymorphism is actually just making a decision
- * based on a value in a Properties.
- *
- * The workflow is that you first create a choice by calling createChoice().  Then you create options using the binder
- * returned by the optionBinder() method.  Multiple different modules can call optionBinder and all options will be
- * reflected at injection time as long as equivalent interface Key objects are passed into the various methods.
- */
-@PublicApi
-public class PolyBind
-{
-  /**
-   * Sets up a "choice" for the injector to resolve at injection time.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param property the property that will be checked to determine the implementation choice
-   * @param interfaceKey the interface that will be injected using this choice
-   * @param defaultKey the default instance to be injected if the property doesn't match a choice.  Can be null
-   * @param <T> interface type
-   * @return A ScopedBindingBuilder so that scopes can be added to the binding, if required.
-   */
-  public static <T> ScopedBindingBuilder createChoice(
-      Binder binder,
-      String property,
-      Key<T> interfaceKey,
-      @Nullable Key<? extends T> defaultKey
-  )
-  {
-    ConfiggedProvider<T> provider = new ConfiggedProvider<>(interfaceKey, property, defaultKey, null);
-    return binder.bind(interfaceKey).toProvider(provider);
-  }
-
-  /**
-   * @deprecated use {@link #createChoiceWithDefault(Binder, String, Key, String)}
-   * instead. {@code defaultKey} argument is ignored.
-   */
-  @Deprecated
-  public static <T> ScopedBindingBuilder createChoiceWithDefault(
-      Binder binder,
-      String property,
-      Key<T> interfaceKey,
-      Key<? extends T> defaultKey,
-      String defaultPropertyValue
-  )
-  {
-    return createChoiceWithDefault(binder, property, interfaceKey, defaultPropertyValue);
-  }
-
-  /**
-   * Sets up a "choice" for the injector to resolve at injection time.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param property the property that will be checked to determine the implementation choice
-   * @param interfaceKey the interface that will be injected using this choice
-   * @param defaultPropertyValue the default property value to use if the property is not set.
-   * @param <T> interface type
-   * @return A ScopedBindingBuilder so that scopes can be added to the binding, if required.
-   */
-  public static <T> ScopedBindingBuilder createChoiceWithDefault(
-      Binder binder,
-      String property,
-      Key<T> interfaceKey,
-      String defaultPropertyValue
-  )
-  {
-    Preconditions.checkNotNull(defaultPropertyValue);
-    ConfiggedProvider<T> provider = new ConfiggedProvider<>(interfaceKey, property, null, defaultPropertyValue);
-    return binder.bind(interfaceKey).toProvider(provider);
-  }
-
-  /**
-   * Binds an option for a specific choice.  The choice must already be registered on the injector for this to work.
-   *
-   * @param binder the binder for the injector that is being configured
-   * @param interfaceKey the interface that will have an option added to it.  This must equal the
-   *                     Key provided to createChoice
-   * @param <T> interface type
-   * @return A MapBinder that can be used to create the actual option bindings.
-   */
-  public static <T> MapBinder<String, T> optionBinder(Binder binder, Key<T> interfaceKey)
-  {
-    final TypeLiteral<T> interfaceType = interfaceKey.getTypeLiteral();
-
-    if (interfaceKey.getAnnotation() != null) {
-      return MapBinder.newMapBinder(
-          binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotation()
-      );
-    } else if (interfaceKey.getAnnotationType() != null) {
-      return MapBinder.newMapBinder(
-          binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotationType()
-      );
-    } else {
-      return MapBinder.newMapBinder(binder, TypeLiteral.get(String.class), interfaceType);
-    }
-  }
-
-  static class ConfiggedProvider<T> implements Provider<T>
-  {
-    private final Key<T> key;
-    private final String property;
-    @Nullable
-    private final Key<? extends T> defaultKey;
-    @Nullable
-    private final String defaultPropertyValue;
-
-    private Injector injector;
-    private Properties props;
-
-    ConfiggedProvider(
-        Key<T> key,
-        String property,
-        @Nullable Key<? extends T> defaultKey,
-        @Nullable String defaultPropertyValue
-    )
-    {
-      this.key = key;
-      this.property = property;
-      this.defaultKey = defaultKey;
-      this.defaultPropertyValue = defaultPropertyValue;
-    }
-
-    @Inject
-    void configure(Injector injector, Properties props)
-    {
-      this.injector = injector;
-      this.props = props;
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public T get()
-    {
-      final ParameterizedType mapType = Types.mapOf(
-          String.class, Types.newParameterizedType(Provider.class, key.getTypeLiteral().getType())
-      );
-
-      final Map<String, Provider<T>> implsMap;
-      if (key.getAnnotation() != null) {
-        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType, key.getAnnotation()));
-      } else if (key.getAnnotationType() != null) {
-        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType, key.getAnnotation()));
-      } else {
-        implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType));
-      }
-
-      String implName = props.getProperty(property);
-      if (implName == null) {
-        if (defaultPropertyValue == null) {
-          if (defaultKey == null) {
-            throw new ProvisionException(StringUtils.format("Some value must be configured for [%s]", key));
-          }
-          return injector.getInstance(defaultKey);
-        }
-        implName = defaultPropertyValue;
-      }
-      final Provider<T> provider = implsMap.get(implName);
-
-      if (provider == null) {
-        throw new ProvisionException(
-            StringUtils.format("Unknown provider[%s] of %s, known options[%s]", implName, key, implsMap.keySet())
-        );
-      }
-
-      return provider.get();
-    }
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/SupplierProvider.java b/api/src/main/java/io/druid/guice/SupplierProvider.java
deleted file mode 100644
index 32afa505d03..00000000000
--- a/api/src/main/java/io/druid/guice/SupplierProvider.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Supplier;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.Provider;
-
-/**
- */
-public class SupplierProvider<T> implements Provider<T>
-{
-  private final Key<Supplier<T>> supplierKey;
-
-  private Provider<Supplier<T>> supplierProvider;
-
-  public SupplierProvider(
-      Key<Supplier<T>> supplierKey
-  )
-  {
-    this.supplierKey = supplierKey;
-  }
-
-  @Inject
-  public void configure(Injector injector)
-  {
-    this.supplierProvider = injector.getProvider(supplierKey);
-  }
-
-  @Override
-  public T get()
-  {
-    return supplierProvider.get().get();
-  }
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java b/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java
deleted file mode 100644
index ef60239a4eb..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@BindingAnnotation
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@PublicApi
-public @interface EscalatedGlobal
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/ExtensionPoint.java b/api/src/main/java/io/druid/guice/annotations/ExtensionPoint.java
deleted file mode 100644
index 9dc02e17044..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/ExtensionPoint.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * Signifies that the annotated type is an extension point. Extension points are interfaces or non-final classes that
- * may be subclassed in extensions in order to add functionality to Druid. Extension points may change in breaking ways
- * only between major Druid release lines (e.g. 0.10.x -> 0.11.0), but otherwise must remain stable. Extension points
- * may change at any time in non-breaking ways, however, such as by adding new default methods to an interface.
- *
- * All public and protected fields, methods, and constructors of annotated classes and interfaces are considered
- * stable in this sense. If a class is not annotated, but an individual field, method, or constructor is
- * annotated, then only that particular field, method, or constructor is considered an extension API.
- *
- * Extension points are all considered public APIs in the sense of {@link PublicApi}, even if not explicitly annotated
- * as such.
- *
- * Note that there are number of injectable interfaces that are not annotated with {@code ExtensionPoint}. You may
- * still extend these interfaces in extensions, but your extension may need to be recompiled even for a minor
- * update of Druid.
- *
- * @see PublicApi
- */
-@Target({ElementType.TYPE})
-@Retention(RetentionPolicy.SOURCE)
-public @interface ExtensionPoint
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/Global.java b/api/src/main/java/io/druid/guice/annotations/Global.java
deleted file mode 100644
index 84de2013261..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/Global.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@BindingAnnotation
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@PublicApi
-public @interface Global
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java b/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java
deleted file mode 100644
index 948bd576063..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@BindingAnnotation
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@PublicApi
-public @interface JSR311Resource
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/Json.java b/api/src/main/java/io/druid/guice/annotations/Json.java
deleted file mode 100644
index 4371554977a..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/Json.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@BindingAnnotation
-@PublicApi
-public @interface Json
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/PublicApi.java b/api/src/main/java/io/druid/guice/annotations/PublicApi.java
deleted file mode 100644
index f398dfe81a2..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/PublicApi.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * Signifies that the annotated entity is a public API for extension authors. Public APIs may change in breaking ways
- * only between major Druid release lines (e.g. 0.10.x -> 0.11.0), but otherwise must remain stable. Public APIs may
- * change at any time in non-breaking ways, however, such as by adding new fields, methods, or constructors.
- *
- * Note that interfaces annotated with {@code PublicApi} but not with {@link ExtensionPoint} are not meant to be
- * subclassed in extensions. In this case, the annotation simply signifies that the interface is stable for callers.
- * In particular, since it is not meant to be subclassed, new non-default methods may be added to an interface and
- * new abstract methods may be added to a class.
- *
- * If a class or interface is annotated, then all public and protected fields, methods, and constructors that class
- * or interface are considered stable in this sense. If a class is not annotated, but an individual field, method, or
- * constructor is annotated, then only that particular field, method, or constructor is considered a public API.
- *
- * Classes, fields, method, and constructors _not_ annotated with {@code @PublicApi} may be modified or removed
- * in any Druid release, unless they are annotated with {@link ExtensionPoint} (which implies they are a public API
- * as well).
- *
- * @see ExtensionPoint
- */
-@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.CONSTRUCTOR})
-@Retention(RetentionPolicy.SOURCE)
-public @interface PublicApi
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/Self.java b/api/src/main/java/io/druid/guice/annotations/Self.java
deleted file mode 100644
index f5a8b348c4e..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/Self.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@BindingAnnotation
-@PublicApi
-public @interface Self
-{
-}
diff --git a/api/src/main/java/io/druid/guice/annotations/Smile.java b/api/src/main/java/io/druid/guice/annotations/Smile.java
deleted file mode 100644
index babfb5a68d7..00000000000
--- a/api/src/main/java/io/druid/guice/annotations/Smile.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice.annotations;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- */
-@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-@Retention(RetentionPolicy.RUNTIME)
-@BindingAnnotation
-@PublicApi
-public @interface Smile
-{
-}
diff --git a/api/src/main/java/io/druid/indexer/TaskLocation.java b/api/src/main/java/io/druid/indexer/TaskLocation.java
deleted file mode 100644
index 70fd0b2e3ce..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskLocation.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.Objects;
-
-public class TaskLocation
-{
-  private static final TaskLocation UNKNOWN = new TaskLocation(null, -1, -1);
-
-  private final String host;
-  private final int port;
-  private final int tlsPort;
-
-  public static TaskLocation create(String host, int port, int tlsPort)
-  {
-    return new TaskLocation(host, port, tlsPort);
-  }
-
-  public static TaskLocation unknown()
-  {
-    return TaskLocation.UNKNOWN;
-  }
-
-  @JsonCreator
-  public TaskLocation(
-      @JsonProperty("host") String host,
-      @JsonProperty("port") int port,
-      @JsonProperty("tlsPort") int tlsPort
-  )
-  {
-    this.host = host;
-    this.port = port;
-    this.tlsPort = tlsPort;
-  }
-
-  @JsonProperty
-  public String getHost()
-  {
-    return host;
-  }
-
-  @JsonProperty
-  public int getPort()
-  {
-    return port;
-  }
-
-  @JsonProperty
-  public int getTlsPort()
-  {
-    return tlsPort;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    TaskLocation that = (TaskLocation) o;
-
-    return port == that.port && tlsPort == that.tlsPort &&
-           Objects.equals(host, that.host);
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = host.hashCode();
-    result = 31 * result + port;
-    result = 31 * result + tlsPort;
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "TaskLocation{" +
-           "host='" + host + '\'' +
-           ", port=" + port +
-           ", tlsPort=" + tlsPort +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/indexer/TaskState.java b/api/src/main/java/io/druid/indexer/TaskState.java
deleted file mode 100644
index c4f54a1f716..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskState.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-public enum TaskState
-{
-  RUNNING,
-  SUCCESS,
-  FAILED;
-
-  public boolean isRunnable()
-  {
-    return this == RUNNING;
-  }
-
-  public boolean isComplete()
-  {
-    return this != RUNNING;
-  }
-
-  public boolean isSuccess()
-  {
-    return this == SUCCESS;
-  }
-
-  public boolean isFailure()
-  {
-    return this == FAILED;
-  }
-}
diff --git a/api/src/main/java/io/druid/indexer/TaskStatusPlus.java b/api/src/main/java/io/druid/indexer/TaskStatusPlus.java
deleted file mode 100644
index a45a9a7865a..00000000000
--- a/api/src/main/java/io/druid/indexer/TaskStatusPlus.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.indexer;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import org.joda.time.DateTime;
-
-import javax.annotation.Nullable;
-
-public class TaskStatusPlus
-{
-  private final String id;
-  private final DateTime createdTime;
-  private final DateTime queueInsertionTime;
-  private final TaskState state;
-  private final Long duration;
-  private final TaskLocation location;
-
-  @JsonCreator
-  public TaskStatusPlus(
-      @JsonProperty("id") String id,
-      @JsonProperty("createdTime") DateTime createdTime,
-      @JsonProperty("queueInsertionTime") DateTime queueInsertionTime,
-      @JsonProperty("state") @Nullable TaskState state,
-      @JsonProperty("duration") @Nullable Long duration,
-      @JsonProperty("location") TaskLocation location
-  )
-  {
-    if (state != null && state.isComplete()) {
-      Preconditions.checkNotNull(duration, "duration");
-    }
-    this.id = Preconditions.checkNotNull(id, "id");
-    this.createdTime = Preconditions.checkNotNull(createdTime, "createdTime");
-    this.queueInsertionTime = Preconditions.checkNotNull(queueInsertionTime, "queueInsertionTime");
-    this.state = state;
-    this.duration = duration;
-    this.location = Preconditions.checkNotNull(location, "location");
-  }
-
-  @JsonProperty
-  public String getId()
-  {
-    return id;
-  }
-
-  @JsonProperty
-  public DateTime getCreatedTime()
-  {
-    return createdTime;
-  }
-
-  @JsonProperty
-  public DateTime getQueueInsertionTime()
-  {
-    return queueInsertionTime;
-  }
-
-  @JsonProperty
-  public TaskState getState()
-  {
-    return state;
-  }
-
-  @JsonProperty
-  public Long getDuration()
-  {
-    return duration;
-  }
-
-  @JsonProperty
-  public TaskLocation getLocation()
-  {
-    return location;
-  }
-}
diff --git a/api/src/main/java/io/druid/initialization/DruidModule.java b/api/src/main/java/io/druid/initialization/DruidModule.java
deleted file mode 100644
index 9479df114ee..00000000000
--- a/api/src/main/java/io/druid/initialization/DruidModule.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.initialization;
-
-import com.fasterxml.jackson.databind.Module;
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.util.List;
-
-/**
- */
-@ExtensionPoint
-public interface DruidModule extends com.google.inject.Module
-{
-  List<? extends Module> getJacksonModules();
-}
diff --git a/api/src/main/java/io/druid/jackson/CommaListJoinDeserializer.java b/api/src/main/java/io/druid/jackson/CommaListJoinDeserializer.java
deleted file mode 100644
index 15746a0fb63..00000000000
--- a/api/src/main/java/io/druid/jackson/CommaListJoinDeserializer.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.jackson;
-
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.deser.std.StdScalarDeserializer;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- */
-public class CommaListJoinDeserializer extends StdScalarDeserializer<List<String>>
-{
-  protected CommaListJoinDeserializer()
-  {
-    super(List.class);
-  }
-
-  @Override
-  public List<String> deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
-      throws IOException, JsonProcessingException
-  {
-    return Arrays.asList(jsonParser.getText().split(","));
-  }
-}
diff --git a/api/src/main/java/io/druid/jackson/CommaListJoinSerializer.java b/api/src/main/java/io/druid/jackson/CommaListJoinSerializer.java
deleted file mode 100644
index 7e39b7c72ba..00000000000
--- a/api/src/main/java/io/druid/jackson/CommaListJoinSerializer.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.jackson;
-
-import com.fasterxml.jackson.core.JsonGenerationException;
-import com.fasterxml.jackson.core.JsonGenerator;
-import com.fasterxml.jackson.databind.SerializerProvider;
-import com.fasterxml.jackson.databind.ser.std.StdScalarSerializer;
-import com.google.common.base.Joiner;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- */
-public class CommaListJoinSerializer extends StdScalarSerializer<List<String>>
-{
-  private static final Joiner joiner = Joiner.on(",");
-
-  protected CommaListJoinSerializer()
-  {
-    super(List.class, true);
-  }
-
-  @Override
-  public void serialize(List<String> value, JsonGenerator jgen, SerializerProvider provider)
-      throws IOException, JsonGenerationException
-  {
-    jgen.writeString(joiner.join(value));
-  }
-}
diff --git a/api/src/main/java/io/druid/js/JavaScriptConfig.java b/api/src/main/java/io/druid/js/JavaScriptConfig.java
deleted file mode 100644
index e97f19b1a8a..00000000000
--- a/api/src/main/java/io/druid/js/JavaScriptConfig.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.js;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import io.druid.guice.annotations.PublicApi;
-
-/**
- * Should be used by extension filters, aggregators, etc, that use JavaScript to determine if JavaScript is enabled
- * or not.
- */
-@PublicApi
-public class JavaScriptConfig
-{
-  public static final int DEFAULT_OPTIMIZATION_LEVEL = 9;
-
-  private static final JavaScriptConfig ENABLED_INSTANCE = new JavaScriptConfig(true);
-
-  @JsonProperty
-  private final boolean enabled;
-
-  @JsonCreator
-  public JavaScriptConfig(
-      @JsonProperty("enabled") boolean enabled
-  )
-  {
-    this.enabled = enabled;
-  }
-
-  public boolean isEnabled()
-  {
-    return enabled;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    JavaScriptConfig that = (JavaScriptConfig) o;
-
-    return enabled == that.enabled;
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return (enabled ? 1 : 0);
-  }
-
-  @Override
-  public String toString()
-  {
-    return "JavaScriptConfig{" +
-           "enabled=" + enabled +
-           '}';
-  }
-
-  public static JavaScriptConfig getEnabledInstance()
-  {
-    return ENABLED_INSTANCE;
-  }
-}
diff --git a/api/src/main/java/io/druid/query/SegmentDescriptor.java b/api/src/main/java/io/druid/query/SegmentDescriptor.java
deleted file mode 100644
index ca7dfb2767c..00000000000
--- a/api/src/main/java/io/druid/query/SegmentDescriptor.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.query;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import org.joda.time.Interval;
-
-/**
-*/
-public class SegmentDescriptor
-{
-  private final Interval interval;
-  private final String version;
-  private final int partitionNumber;
-
-  @JsonCreator
-  public SegmentDescriptor(
-      @JsonProperty("itvl") Interval interval,
-      @JsonProperty("ver") String version,
-      @JsonProperty("part") int partitionNumber
-  )
-  {
-    this.interval = interval;
-    this.version = version;
-    this.partitionNumber = partitionNumber;
-  }
-
-  @JsonProperty("itvl")
-  public Interval getInterval()
-  {
-    return interval;
-  }
-
-  @JsonProperty("ver")
-  public String getVersion()
-  {
-    return version;
-  }
-
-  @JsonProperty("part")
-  public int getPartitionNumber()
-  {
-    return partitionNumber;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    SegmentDescriptor that = (SegmentDescriptor) o;
-
-    if (partitionNumber != that.partitionNumber) {
-      return false;
-    }
-    if (interval != null ? !interval.equals(that.interval) : that.interval != null) {
-      return false;
-    }
-    if (version != null ? !version.equals(that.version) : that.version != null) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = interval != null ? interval.hashCode() : 0;
-    result = 31 * result + (version != null ? version.hashCode() : 0);
-    result = 31 * result + partitionNumber;
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "SegmentDescriptor{" +
-           "interval=" + interval +
-           ", version='" + version + '\'' +
-           ", partitionNumber=" + partitionNumber +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/segment/SegmentUtils.java b/api/src/main/java/io/druid/segment/SegmentUtils.java
deleted file mode 100644
index 448eaf0c503..00000000000
--- a/api/src/main/java/io/druid/segment/SegmentUtils.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment;
-
-import com.google.common.io.Files;
-import com.google.common.primitives.Ints;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.IOE;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * Utility methods useful for implementing deep storage extensions.
- */
-@PublicApi
-public class SegmentUtils
-{
-  public static int getVersionFromDir(File inDir) throws IOException
-  {
-    File versionFile = new File(inDir, "version.bin");
-    if (versionFile.exists()) {
-      return Ints.fromByteArray(Files.toByteArray(versionFile));
-    }
-
-    final File indexFile = new File(inDir, "index.drd");
-    int version;
-    if (indexFile.exists()) {
-      try (InputStream in = new FileInputStream(indexFile)) {
-        version = in.read();
-      }
-      return version;
-    }
-
-    throw new IOE("Invalid segment dir [%s]. Can't find either of version.bin or index.drd.", inDir);
-  }
-}
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java b/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java
deleted file mode 100644
index 2776bfb4aa4..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.timeline.DataSegment;
-
-import javax.annotation.Nullable;
-
-@ExtensionPoint
-public interface DataSegmentArchiver
-{
-  /**
-   * Perform an archive task on the segment and return the resulting segment or null if there was no action needed.
-   *
-   * @param segment The source segment
-   *
-   * @return The segment after archiving or `null` if there was no archiving performed.
-   *
-   * @throws SegmentLoadingException on error
-   */
-  @Nullable
-  DataSegment archive(DataSegment segment) throws SegmentLoadingException;
-
-  /**
-   * Perform the restore from an archived segment and return the resulting segment or null if there was no action
-   *
-   * @param segment The source (archived) segment
-   *
-   * @return The segment after it has been unarchived
-   *
-   * @throws SegmentLoadingException on error
-   */
-  @Nullable
-  DataSegment restore(DataSegment segment) throws SegmentLoadingException;
-}
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java b/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java
deleted file mode 100644
index 937a42e72c2..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.timeline.DataSegment;
-
-import java.util.Set;
-
-/**
- * A DataSegmentFinder is responsible for finding Druid segments underneath a specified directory and optionally updates
- * all descriptor.json files on deep storage with correct loadSpec.
- */
-@ExtensionPoint
-public interface DataSegmentFinder
-{
-  /**
-   * This method should first recursively look for descriptor.json (partitionNum_descriptor.json for HDFS data storage) underneath
-   * workingDirPath and then verify that index.zip (partitionNum_index.zip for HDFS data storage) exists in the same folder.
-   * If not, it should throw SegmentLoadingException to let the caller know that descriptor.json exists
-   * while index.zip doesn't. If a segment is found and updateDescriptor is set, then this method should update the
-   * loadSpec in descriptor.json to reflect the location from where it was found. After the search, this method
-   * should return the set of segments that were found.
-   *
-   * @param workingDirPath   the String representation of the working directory path
-   * @param updateDescriptor if true, update loadSpec in descriptor.json if loadSpec's location is different from where
-   *                         desciptor.json was found
-   *
-   * @return a set of segments that were found underneath workingDirPath
-   */
-  Set<DataSegment> findSegments(String workingDirPath, boolean updateDescriptor) throws SegmentLoadingException;
-}
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java b/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java
deleted file mode 100644
index c26a73daeb1..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.timeline.DataSegment;
-
-import java.io.IOException;
-
-/**
- */
-@ExtensionPoint
-public interface DataSegmentKiller
-{
-  void kill(DataSegment segments) throws SegmentLoadingException;
-  void killAll() throws IOException;
-
-}
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java b/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java
deleted file mode 100644
index a7554f3a2f8..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.timeline.DataSegment;
-
-import java.util.Map;
-
-@ExtensionPoint
-public interface DataSegmentMover
-{
-  DataSegment move(DataSegment segment, Map<String, Object> targetLoadSpec) throws SegmentLoadingException;
-}
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java b/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java
deleted file mode 100644
index f8e50293f05..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.timeline.DataSegment;
-
-import java.io.File;
-
-/**
- * A DataSegmentPuller is responsible for pulling data for a particular segment into a particular directory
- */
-@ExtensionPoint
-public interface DataSegmentPuller
-{
-  /**
-   * Pull down segment files for the given DataSegment and put them in the given directory.
-   *
-   * @param segment The segment to pull down files for
-   * @param dir     The directory to store the files in
-   *
-   * @throws SegmentLoadingException if there are any errors
-   */
-  void getSegmentFiles(DataSegment segment, File dir) throws SegmentLoadingException;
-}
diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java
deleted file mode 100644
index b9bf810f72c..00000000000
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import com.google.common.base.Joiner;
-import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.java.util.common.StringUtils;
-import io.druid.timeline.DataSegment;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URI;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-@ExtensionPoint
-public interface DataSegmentPusher
-{
-  Joiner JOINER = Joiner.on("/").skipNulls();
-
-  @Deprecated
-  String getPathForHadoop(String dataSource);
-  String getPathForHadoop();
-  DataSegment push(File file, DataSegment segment) throws IOException;
-  //use map instead of LoadSpec class to avoid dependency pollution.
-  Map<String, Object> makeLoadSpec(URI finalIndexZipFilePath);
-
-  default String getStorageDir(DataSegment dataSegment)
-  {
-    return getDefaultStorageDir(dataSegment);
-  }
-
-  default String makeIndexPathName(DataSegment dataSegment, String indexName)
-  {
-    return StringUtils.format("./%s/%s", getStorageDir(dataSegment), indexName);
-  }
-
-  /**
-   * Property prefixes that should be added to the "allowedHadoopPrefix" config for passing down to Hadoop jobs. These
-   * should be property prefixes like "druid.xxx", which means to include "druid.xxx" and "druid.xxx.*".
-   */
-  default List<String> getAllowedPropertyPrefixesForHadoop()
-  {
-    return Collections.emptyList();
-  }
-
-  // Note: storage directory structure format = .../dataSource/interval/version/partitionNumber/
-  // If above format is ever changed, make sure to change it appropriately in other places
-  // e.g. HDFSDataSegmentKiller uses this information to clean the version, interval and dataSource directories
-  // on segment deletion if segment being deleted was the only segment
-  static String getDefaultStorageDir(DataSegment segment)
-  {
-    return JOINER.join(
-        segment.getDataSource(),
-        StringUtils.format("%s_%s", segment.getInterval().getStart(), segment.getInterval().getEnd()),
-        segment.getVersion(),
-        segment.getShardSpec().getPartitionNum()
-    );
-  }
-}
diff --git a/api/src/main/java/io/druid/segment/loading/LoadSpec.java b/api/src/main/java/io/druid/segment/loading/LoadSpec.java
deleted file mode 100644
index 12bfefef751..00000000000
--- a/api/src/main/java/io/druid/segment/loading/LoadSpec.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.File;
-
-/**
- * A means of pulling segment files into a destination directory
- */
-@ExtensionPoint
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-public interface LoadSpec
-{
-  /**
-   * Method should put the segment files in the directory passed
-   * @param destDir The destination directory
-   * @return The byte count of data put in the destination directory
-   */
-  LoadSpecResult loadSegment(File destDir) throws SegmentLoadingException;
-
-  // Hold interesting data about the results of the segment load
-  class LoadSpecResult
-  {
-    private final long size;
-
-    public LoadSpecResult(long size)
-    {
-      this.size = size;
-    }
-
-    public long getSize()
-    {
-      return this.size;
-    }
-  }
-}
diff --git a/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java b/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java
deleted file mode 100644
index 3bd388dc9f9..00000000000
--- a/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.StringUtils;
-
-/**
- */
-@PublicApi
-public class SegmentLoadingException extends Exception
-{
-  public SegmentLoadingException(
-      String formatString,
-      Object... objs
-  )
-  {
-    super(StringUtils.nonStrictFormat(formatString, objs));
-  }
-
-  public SegmentLoadingException(
-      Throwable cause,
-      String formatString,
-      Object... objs
-  )
-  {
-    super(StringUtils.nonStrictFormat(formatString, objs), cause);
-  }
-}
diff --git a/api/src/main/java/io/druid/segment/loading/URIDataPuller.java b/api/src/main/java/io/druid/segment/loading/URIDataPuller.java
deleted file mode 100644
index 41ea811b5ae..00000000000
--- a/api/src/main/java/io/druid/segment/loading/URIDataPuller.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment.loading;
-
-import com.google.common.base.Predicate;
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-
-/**
- * A URIDataPuller has handlings for URI based data
- */
-@ExtensionPoint
-public interface URIDataPuller
-{
-  /**
-   * Create a new InputStream based on the URI
-   *
-   * @param uri The URI to open an Input Stream to
-   *
-   * @return A new InputStream which streams the URI in question
-   *
-   * @throws IOException
-   */
-  InputStream getInputStream(URI uri) throws IOException;
-
-  /**
-   * Returns an abstract "version" for the URI. The exact meaning of the version is left up to the implementation.
-   *
-   * @param uri The URI to check
-   *
-   * @return A "version" as interpreted by the URIDataPuller implementation
-   *
-   * @throws IOException on error
-   */
-  String getVersion(URI uri) throws IOException;
-
-  /**
-   * Evaluates a Throwable to see if it is recoverable. This is expected to be used in conjunction with the other methods
-   * to determine if anything thrown from the method should be retried.
-   *
-   * @return Predicate function indicating if the Throwable is recoverable
-   */
-  Predicate<Throwable> shouldRetryPredicate();
-}
diff --git a/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java b/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java
deleted file mode 100644
index 3c413303c70..00000000000
--- a/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import com.google.common.base.Optional;
-import com.google.common.io.ByteSource;
-import io.druid.java.util.common.logger.Logger;
-
-import java.io.File;
-import java.io.IOException;
-
-public class NoopTaskLogs implements TaskLogs
-{
-  private final Logger log = new Logger(TaskLogs.class);
-
-  @Override
-  public Optional<ByteSource> streamTaskLog(String taskid, long offset) throws IOException
-  {
-    return Optional.absent();
-  }
-
-  @Override
-  public void pushTaskLog(String taskid, File logFile) throws IOException
-  {
-    log.info("Not pushing logs for task: %s", taskid);
-  }
-
-  @Override
-  public void killAll() throws IOException
-  {
-    log.info("Noop: No task logs are deleted.");
-  }
-
-  @Override
-  public void killOlderThan(long timestamp) throws IOException
-  {
-    log.info("Noop: No task logs are deleted.");
-  }
-}
diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java b/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java
deleted file mode 100644
index 7a63f640a8f..00000000000
--- a/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.IOException;
-
-/**
- */
-@ExtensionPoint
-public interface TaskLogKiller
-{
-  void killAll() throws IOException;
-  void killOlderThan(long timestamp) throws IOException;
-}
diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java b/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java
deleted file mode 100644
index a904a16f5d1..00000000000
--- a/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Something that knows how to persist local task logs to some form of long-term storage.
- */
-@ExtensionPoint
-public interface TaskLogPusher
-{
-  void pushTaskLog(String taskid, File logFile) throws IOException;
-}
diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java b/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java
deleted file mode 100644
index b685c7b7659..00000000000
--- a/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import com.google.common.base.Optional;
-import com.google.common.io.ByteSource;
-import io.druid.guice.annotations.ExtensionPoint;
-
-import java.io.IOException;
-
-/**
- * Something that knows how to stream logs for tasks.
- */
-@ExtensionPoint
-public interface TaskLogStreamer
-{
-  /**
-   * Stream log for a task.
-   *
-   * @param offset If zero, stream the entire log. If positive, attempt to read from this position onwards. If
-   *               negative, attempt to read this many bytes from the end of the file (like <tt>tail -n</tt>).
-   *
-   * @return input supplier for this log, if available from this provider
-   */
-  Optional<ByteSource> streamTaskLog(String taskid, long offset) throws IOException;
-}
diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogs.java b/api/src/main/java/io/druid/tasklogs/TaskLogs.java
deleted file mode 100644
index 383c3559ae1..00000000000
--- a/api/src/main/java/io/druid/tasklogs/TaskLogs.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.tasklogs;
-
-import io.druid.guice.annotations.ExtensionPoint;
-
-@ExtensionPoint
-public interface TaskLogs extends TaskLogStreamer, TaskLogPusher, TaskLogKiller
-{
-}
diff --git a/api/src/main/java/io/druid/timeline/DataSegment.java b/api/src/main/java/io/druid/timeline/DataSegment.java
deleted file mode 100644
index bfb2653378f..00000000000
--- a/api/src/main/java/io/druid/timeline/DataSegment.java
+++ /dev/null
@@ -1,494 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline;
-
-import com.fasterxml.jackson.annotation.JacksonInject;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.fasterxml.jackson.databind.annotation.JsonSerialize;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Interner;
-import com.google.common.collect.Interners;
-import com.google.inject.Inject;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.jackson.CommaListJoinDeserializer;
-import io.druid.jackson.CommaListJoinSerializer;
-import io.druid.java.util.common.granularity.Granularities;
-import io.druid.query.SegmentDescriptor;
-import io.druid.timeline.partition.NoneShardSpec;
-import io.druid.timeline.partition.ShardSpec;
-import it.unimi.dsi.fastutil.objects.Object2ObjectArrayMap;
-import org.joda.time.DateTime;
-import org.joda.time.Interval;
-
-import javax.annotation.Nullable;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-/**
- */
-@PublicApi
-public class DataSegment implements Comparable<DataSegment>
-{
-  public static String delimiter = "_";
-  private final Integer binaryVersion;
-  private static final Interner<String> STRING_INTERNER = Interners.newWeakInterner();
-  private static final Interner<List<String>> DIMENSIONS_INTERNER = Interners.newWeakInterner();
-  private static final Interner<List<String>> METRICS_INTERNER = Interners.newWeakInterner();
-  private static final Map<String, Object> PRUNED_LOAD_SPEC = ImmutableMap.of(
-      "load spec is pruned, because it's not needed on Brokers, but eats a lot of heap space",
-      ""
-  );
-
-  public static String makeDataSegmentIdentifier(
-      String dataSource,
-      DateTime start,
-      DateTime end,
-      String version,
-      ShardSpec shardSpec
-  )
-  {
-    StringBuilder sb = new StringBuilder();
-
-    sb.append(dataSource).append(delimiter)
-      .append(start).append(delimiter)
-      .append(end).append(delimiter)
-      .append(version);
-
-    if (shardSpec.getPartitionNum() != 0) {
-      sb.append(delimiter).append(shardSpec.getPartitionNum());
-    }
-
-    return sb.toString();
-  }
-
-  /**
-   * This class is needed for optional injection of pruneLoadSpec, see
-   * github.com/google/guice/wiki/FrequentlyAskedQuestions#how-can-i-inject-optional-parameters-into-a-constructor
-   */
-  @VisibleForTesting
-  public static class PruneLoadSpecHolder
-  {
-    @VisibleForTesting
-    public static final PruneLoadSpecHolder DEFAULT = new PruneLoadSpecHolder();
-
-    @Inject(optional = true) @PruneLoadSpec boolean pruneLoadSpec = false;
-  }
-
-  private final String dataSource;
-  private final Interval interval;
-  private final String version;
-  @Nullable
-  private final Map<String, Object> loadSpec;
-  private final List<String> dimensions;
-  private final List<String> metrics;
-  private final ShardSpec shardSpec;
-  private final long size;
-  private final String identifier;
-
-  public DataSegment(
-      String dataSource,
-      Interval interval,
-      String version,
-      Map<String, Object> loadSpec,
-      List<String> dimensions,
-      List<String> metrics,
-      ShardSpec shardSpec,
-      Integer binaryVersion,
-      long size
-  )
-  {
-    this(
-        dataSource,
-        interval,
-        version,
-        loadSpec,
-        dimensions,
-        metrics,
-        shardSpec,
-        binaryVersion,
-        size,
-        PruneLoadSpecHolder.DEFAULT
-    );
-  }
-
-  @JsonCreator
-  public DataSegment(
-      @JsonProperty("dataSource") String dataSource,
-      @JsonProperty("interval") Interval interval,
-      @JsonProperty("version") String version,
-      // use `Map` *NOT* `LoadSpec` because we want to do lazy materialization to prevent dependency pollution
-      @JsonProperty("loadSpec") @Nullable Map<String, Object> loadSpec,
-      @JsonProperty("dimensions")
-      @JsonDeserialize(using = CommaListJoinDeserializer.class)
-      @Nullable
-          List<String> dimensions,
-      @JsonProperty("metrics")
-      @JsonDeserialize(using = CommaListJoinDeserializer.class)
-      @Nullable
-          List<String> metrics,
-      @JsonProperty("shardSpec") @Nullable ShardSpec shardSpec,
-      @JsonProperty("binaryVersion") Integer binaryVersion,
-      @JsonProperty("size") long size,
-      @JacksonInject PruneLoadSpecHolder pruneLoadSpecHolder
-  )
-  {
-    // dataSource, dimensions & metrics are stored as canonical string values to decrease memory required for storing
-    // large numbers of segments.
-    this.dataSource = STRING_INTERNER.intern(dataSource);
-    this.interval = interval;
-    this.loadSpec = pruneLoadSpecHolder.pruneLoadSpec ? PRUNED_LOAD_SPEC : prepareLoadSpec(loadSpec);
-    this.version = version;
-    // Deduplicating dimensions and metrics lists as a whole because they are very likely the same for the same
-    // dataSource
-    this.dimensions = prepareDimensionsOrMetrics(dimensions, DIMENSIONS_INTERNER);
-    this.metrics = prepareDimensionsOrMetrics(metrics, METRICS_INTERNER);
-    this.shardSpec = (shardSpec == null) ? NoneShardSpec.instance() : shardSpec;
-    this.binaryVersion = binaryVersion;
-    this.size = size;
-
-    this.identifier = makeDataSegmentIdentifier(
-        this.dataSource,
-        this.interval.getStart(),
-        this.interval.getEnd(),
-        this.version,
-        this.shardSpec
-    );
-  }
-
-  @Nullable
-  private Map<String, Object> prepareLoadSpec(@Nullable Map<String, Object> loadSpec)
-  {
-    if (loadSpec == null) {
-      return null;
-    }
-    // Load spec is just of 3 entries on average; HashMap/LinkedHashMap consumes much more memory than ArrayMap
-    Map<String, Object> result = new Object2ObjectArrayMap<>(loadSpec.size());
-    for (Map.Entry<String, Object> e : loadSpec.entrySet()) {
-      result.put(STRING_INTERNER.intern(e.getKey()), e.getValue());
-    }
-    return result;
-  }
-
-  private List<String> prepareDimensionsOrMetrics(@Nullable List<String> list, Interner<List<String>> interner)
-  {
-    if (list == null) {
-      return ImmutableList.of();
-    } else {
-      List<String> result = list
-          .stream()
-          .filter(s -> !Strings.isNullOrEmpty(s))
-          .map(STRING_INTERNER::intern)
-          // TODO replace with ImmutableList.toImmutableList() when updated to Guava 21+
-          .collect(Collectors.collectingAndThen(Collectors.toList(), ImmutableList::copyOf));
-      return interner.intern(result);
-    }
-  }
-
-  /**
-   * Get dataSource
-   *
-   * @return the dataSource
-   */
-  @JsonProperty
-  public String getDataSource()
-  {
-    return dataSource;
-  }
-
-  @JsonProperty
-  public Interval getInterval()
-  {
-    return interval;
-  }
-
-  @Nullable
-  @JsonProperty
-  public Map<String, Object> getLoadSpec()
-  {
-    return loadSpec;
-  }
-
-  @JsonProperty
-  public String getVersion()
-  {
-    return version;
-  }
-
-  @JsonProperty
-  @JsonSerialize(using = CommaListJoinSerializer.class)
-  public List<String> getDimensions()
-  {
-    return dimensions;
-  }
-
-  @JsonProperty
-  @JsonSerialize(using = CommaListJoinSerializer.class)
-  public List<String> getMetrics()
-  {
-    return metrics;
-  }
-
-  @JsonProperty
-  public ShardSpec getShardSpec()
-  {
-    return shardSpec;
-  }
-
-  @JsonProperty
-  public Integer getBinaryVersion()
-  {
-    return binaryVersion;
-  }
-
-  @JsonProperty
-  public long getSize()
-  {
-    return size;
-  }
-
-  @JsonProperty
-  public String getIdentifier()
-  {
-    return identifier;
-  }
-
-  public SegmentDescriptor toDescriptor()
-  {
-    return new SegmentDescriptor(interval, version, shardSpec.getPartitionNum());
-  }
-
-  public DataSegment withLoadSpec(Map<String, Object> loadSpec)
-  {
-    return builder(this).loadSpec(loadSpec).build();
-  }
-
-  public DataSegment withDimensions(List<String> dimensions)
-  {
-    return builder(this).dimensions(dimensions).build();
-  }
-
-  public DataSegment withMetrics(List<String> metrics)
-  {
-    return builder(this).metrics(metrics).build();
-  }
-
-  public DataSegment withSize(long size)
-  {
-    return builder(this).size(size).build();
-  }
-
-  public DataSegment withVersion(String version)
-  {
-    return builder(this).version(version).build();
-  }
-
-  public DataSegment withBinaryVersion(int binaryVersion)
-  {
-    return builder(this).binaryVersion(binaryVersion).build();
-  }
-
-  @Override
-  public int compareTo(DataSegment dataSegment)
-  {
-    return getIdentifier().compareTo(dataSegment.getIdentifier());
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (o instanceof DataSegment) {
-      return getIdentifier().equals(((DataSegment) o).getIdentifier());
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return getIdentifier().hashCode();
-  }
-
-  @Override
-  public String toString()
-  {
-    return "DataSegment{" +
-           "size=" + size +
-           ", shardSpec=" + shardSpec +
-           ", metrics=" + metrics +
-           ", dimensions=" + dimensions +
-           ", version='" + version + '\'' +
-           ", loadSpec=" + loadSpec +
-           ", interval=" + interval +
-           ", dataSource='" + dataSource + '\'' +
-           ", binaryVersion='" + binaryVersion + '\'' +
-           '}';
-  }
-
-  public static Comparator<DataSegment> bucketMonthComparator()
-  {
-    return new Comparator<DataSegment>()
-    {
-      @Override
-      public int compare(DataSegment lhs, DataSegment rhs)
-      {
-        int retVal;
-
-        DateTime lhsMonth = Granularities.MONTH.bucketStart(lhs.getInterval().getStart());
-        DateTime rhsMonth = Granularities.MONTH.bucketStart(rhs.getInterval().getStart());
-
-        retVal = lhsMonth.compareTo(rhsMonth);
-
-        if (retVal != 0) {
-          return retVal;
-        }
-
-        return lhs.compareTo(rhs);
-      }
-    };
-  }
-
-  public static Builder builder()
-  {
-    return new Builder();
-  }
-
-  public static Builder builder(DataSegment segment)
-  {
-    return new Builder(segment);
-  }
-
-  public static class Builder
-  {
-    private String dataSource;
-    private Interval interval;
-    private String version;
-    private Map<String, Object> loadSpec;
-    private List<String> dimensions;
-    private List<String> metrics;
-    private ShardSpec shardSpec;
-    private Integer binaryVersion;
-    private long size;
-
-    public Builder()
-    {
-      this.loadSpec = ImmutableMap.of();
-      this.dimensions = ImmutableList.of();
-      this.metrics = ImmutableList.of();
-      this.shardSpec = NoneShardSpec.instance();
-      this.size = -1;
-    }
-
-    public Builder(DataSegment segment)
-    {
-      this.dataSource = segment.getDataSource();
-      this.interval = segment.getInterval();
-      this.version = segment.getVersion();
-      this.loadSpec = segment.getLoadSpec();
-      this.dimensions = segment.getDimensions();
-      this.metrics = segment.getMetrics();
-      this.shardSpec = segment.getShardSpec();
-      this.binaryVersion = segment.getBinaryVersion();
-      this.size = segment.getSize();
-    }
-
-    public Builder dataSource(String dataSource)
-    {
-      this.dataSource = dataSource;
-      return this;
-    }
-
-    public Builder interval(Interval interval)
-    {
-      this.interval = interval;
-      return this;
-    }
-
-    public Builder version(String version)
-    {
-      this.version = version;
-      return this;
-    }
-
-    public Builder loadSpec(Map<String, Object> loadSpec)
-    {
-      this.loadSpec = loadSpec;
-      return this;
-    }
-
-    public Builder dimensions(List<String> dimensions)
-    {
-      this.dimensions = dimensions;
-      return this;
-    }
-
-    public Builder metrics(List<String> metrics)
-    {
-      this.metrics = metrics;
-      return this;
-    }
-
-    public Builder shardSpec(ShardSpec shardSpec)
-    {
-      this.shardSpec = shardSpec;
-      return this;
-    }
-
-    public Builder binaryVersion(Integer binaryVersion)
-    {
-      this.binaryVersion = binaryVersion;
-      return this;
-    }
-
-    public Builder size(long size)
-    {
-      this.size = size;
-      return this;
-    }
-
-    public DataSegment build()
-    {
-      // Check stuff that goes into the identifier, at least.
-      Preconditions.checkNotNull(dataSource, "dataSource");
-      Preconditions.checkNotNull(interval, "interval");
-      Preconditions.checkNotNull(version, "version");
-      Preconditions.checkNotNull(shardSpec, "shardSpec");
-
-      return new DataSegment(
-          dataSource,
-          interval,
-          version,
-          loadSpec,
-          dimensions,
-          metrics,
-          shardSpec,
-          binaryVersion,
-          size
-      );
-    }
-  }
-}
diff --git a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java
deleted file mode 100644
index a89e9709c83..00000000000
--- a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline;
-
-import com.google.common.base.Function;
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.DateTimes;
-import io.druid.java.util.common.IAE;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.logger.Logger;
-import org.joda.time.DateTime;
-import org.joda.time.Interval;
-
-import java.util.Objects;
-
-@PublicApi
-public class DataSegmentUtils
-{
-  private static final Logger LOGGER = new Logger(DataSegmentUtils.class);
-
-  public static Function<String, Interval> INTERVAL_EXTRACTOR(final String datasource)
-  {
-    return new Function<String, Interval>()
-    {
-      @Override
-      public Interval apply(String identifier)
-      {
-        SegmentIdentifierParts segmentIdentifierParts = valueOf(datasource, identifier);
-        if (segmentIdentifierParts == null) {
-          throw new IAE("Invalid identifier [%s]", identifier);
-        }
-
-        return segmentIdentifierParts.getInterval();
-      }
-    };
-  }
-
-  /**
-   * Parses a segment identifier into its components: dataSource, interval, version, and any trailing tags. Ignores
-   * shard spec.
-   *
-   * It is possible that this method may incorrectly parse an identifier, for example if the dataSource name in the
-   * identifier contains a DateTime parseable string such as 'datasource_2000-01-01T00:00:00.000Z' and dataSource was
-   * provided as 'datasource'. The desired behavior in this case would be to return null since the identifier does not
-   * actually belong to the provided dataSource but a non-null result would be returned. This is an edge case that would
-   * currently only affect paged select queries with a union dataSource of two similarly-named dataSources as in the
-   * given example.
-   *
-   * @param dataSource the dataSource corresponding to this identifier
-   * @param identifier segment identifier
-   * @return a {@link DataSegmentUtils.SegmentIdentifierParts} object if the identifier could be parsed, null otherwise
-   */
-  public static SegmentIdentifierParts valueOf(String dataSource, String identifier)
-  {
-    if (!identifier.startsWith(StringUtils.format("%s_", dataSource))) {
-      return null;
-    }
-
-    String remaining = identifier.substring(dataSource.length() + 1);
-    String[] splits = remaining.split(DataSegment.delimiter);
-    if (splits.length < 3) {
-      return null;
-    }
-
-    try {
-      DateTime start = DateTimes.ISO_DATE_TIME.parse(splits[0]);
-      DateTime end = DateTimes.ISO_DATE_TIME.parse(splits[1]);
-      String version = splits[2];
-      String trail = splits.length > 3 ? join(splits, DataSegment.delimiter, 3, splits.length) : null;
-
-      return new SegmentIdentifierParts(
-          dataSource,
-          new Interval(start, end),
-          version,
-          trail
-      );
-    }
-    catch (IllegalArgumentException e) {
-      return null;
-    }
-  }
-
-  public static String withInterval(final String dataSource, final String identifier, Interval newInterval)
-  {
-    SegmentIdentifierParts segmentDesc = DataSegmentUtils.valueOf(dataSource, identifier);
-    if (segmentDesc == null) {
-      // happens for test segments which has invalid segment id.. ignore for now
-      LOGGER.warn("Invalid segment identifier " + identifier);
-      return identifier;
-    }
-    return segmentDesc.withInterval(newInterval).toString();
-  }
-
-  public static class SegmentIdentifierParts
-  {
-    private final String dataSource;
-    private final Interval interval;
-    private final String version;
-    private final String trail;
-
-    public SegmentIdentifierParts(String dataSource, Interval interval, String version, String trail)
-    {
-      this.dataSource = dataSource;
-      this.interval = interval;
-      this.version = version;
-      this.trail = trail;
-    }
-
-    @PublicApi
-    public String getDataSource()
-    {
-      return dataSource;
-    }
-
-    public Interval getInterval()
-    {
-      return interval;
-    }
-
-    @PublicApi
-    public String getVersion()
-    {
-      return version;
-    }
-
-    public SegmentIdentifierParts withInterval(Interval interval)
-    {
-      return new SegmentIdentifierParts(dataSource, interval, version, trail);
-    }
-
-    @Override
-    public boolean equals(Object o)
-    {
-      if (this == o) {
-        return true;
-      }
-      if (o == null || getClass() != o.getClass()) {
-        return false;
-      }
-
-      SegmentIdentifierParts that = (SegmentIdentifierParts) o;
-
-      if (!Objects.equals(dataSource, that.dataSource)) {
-        return false;
-      }
-      if (!Objects.equals(interval, that.interval)) {
-        return false;
-      }
-      if (!Objects.equals(version, that.version)) {
-        return false;
-      }
-      if (!Objects.equals(trail, that.trail)) {
-        return false;
-      }
-
-      return true;
-    }
-
-    @Override
-    public int hashCode()
-    {
-      return Objects.hash(dataSource, interval, version, trail);
-    }
-
-    @Override
-    public String toString()
-    {
-      return join(
-          new Object[]{dataSource, interval.getStart(), interval.getEnd(), version, trail},
-          DataSegment.delimiter, 0, version == null ? 3 : trail == null ? 4 : 5
-      );
-    }
-  }
-
-  private static String join(Object[] input, String delimiter, int start, int end)
-  {
-    StringBuilder builder = new StringBuilder();
-    for (int i = start; i < end; i++) {
-      if (i > start) {
-        builder.append(delimiter);
-      }
-      if (input[i] != null) {
-        builder.append(input[i]);
-      }
-    }
-    return builder.toString();
-  }
-}
diff --git a/api/src/main/java/io/druid/timeline/PruneLoadSpec.java b/api/src/main/java/io/druid/timeline/PruneLoadSpec.java
deleted file mode 100644
index 758b5b5462b..00000000000
--- a/api/src/main/java/io/druid/timeline/PruneLoadSpec.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline;
-
-import com.google.inject.BindingAnnotation;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * This annnotation is used to inject a boolean parameter into a {@link DataSegment} constructor, which prescribes to
- * drop deserialized "loadSpec" and don't store it in a field of a {@link DataSegment}. It's very useful on Brokers,
- * because they store a lot of DataSegments in their heap, and loadSpec takes a lot of space, while it's not used on
- * Brokers.
- */
-@Target({ElementType.PARAMETER, ElementType.FIELD})
-@Retention(RetentionPolicy.RUNTIME)
-@BindingAnnotation
-public @interface PruneLoadSpec
-{
-}
diff --git a/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java b/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java
deleted file mode 100644
index c116c60c8b8..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Range;
-import io.druid.data.input.InputRow;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- */
-public class NoneShardSpec implements ShardSpec
-{
-  private final static NoneShardSpec INSTANCE = new NoneShardSpec();
-
-  @JsonCreator
-  public static NoneShardSpec instance()
-  {
-    return INSTANCE;
-  }
-
-  /**
-   * @deprecated use {@link #instance()} instead
-   */
-  @Deprecated
-  public NoneShardSpec()
-  {
-    // empty
-  }
-
-  @Override
-  public <T> PartitionChunk<T> createChunk(T obj)
-  {
-    return new SingleElementPartitionChunk<T>(obj);
-  }
-
-  @Override
-  public boolean isInChunk(long timestamp, InputRow inputRow)
-  {
-    return true;
-  }
-
-  @Override
-  @JsonIgnore
-  public int getPartitionNum()
-  {
-    return 0;
-  }
-
-  @Override
-  public ShardSpecLookup getLookup(final List<ShardSpec> shardSpecs)
-  {
-
-    return new ShardSpecLookup()
-    {
-      @Override
-      public ShardSpec getShardSpec(long timestamp, InputRow row)
-      {
-        return shardSpecs.get(0);
-      }
-    };
-  }
-
-  @Override
-  public Map<String, Range<String>> getDomain()
-  {
-    return ImmutableMap.of();
-  }
-
-  @Override
-  public boolean equals(Object obj)
-  {
-    return obj instanceof NoneShardSpec;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return 0;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "NoneShardSpec";
-  }
-}
diff --git a/api/src/main/java/io/druid/timeline/partition/PartitionChunk.java b/api/src/main/java/io/druid/timeline/partition/PartitionChunk.java
deleted file mode 100644
index 9c4450aa65e..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/PartitionChunk.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-/**
- * A PartitionChunk represents a chunk of a partitioned(sharded) space.  It has knowledge of whether it is
- * the start of the domain of partitions, the end of the domain, if it abuts another partition and where it stands
- * inside of a sorted collection of partitions.
- *
- * The ordering of PartitionChunks is based entirely upon the partition boundaries defined inside the concrete
- * PartitionChunk class.  That is, the payload (the object returned by getObject()) should *not* be involved in
- * comparisons between PartitionChunk objects.
- */
-public interface PartitionChunk<T> extends Comparable<PartitionChunk<T>>
-{
-  /**
-   * Returns the payload, generally an object that can be used to perform some action against the shard.
-   *
-   * @return the payload
-   */
-  T getObject();
-
-  /**
-   * Determines if this PartitionChunk abuts another PartitionChunk.  A sequence of abutting PartitionChunks should
-   * start with an object where isStart() == true and eventually end with an object where isEnd() == true.
-   *
-   * @param chunk input chunk
-   * @return true if this chunk abuts the input chunk
-   */
-  boolean abuts(PartitionChunk<T> chunk);
-
-  /**
-   * Returns true if this chunk is the beginning of the partition. Most commonly, that means it represents the range
-   * [-infinity, X) for some concrete X.
-   *
-   * @return true if the chunk is the beginning of the partition
-   */
-  boolean isStart();
-
-  /**
-   * Returns true if this chunk is the end of the partition.  Most commonly, that means it represents the range
-   * [X, infinity] for some concrete X.
-   *
-   * @return true if the chunk is the beginning of the partition
-   */
-  boolean isEnd();
-
-  /**
-   * Returns the partition chunk number of this PartitionChunk.  I.e. if there are 4 partitions in total and this
-   * is the 3rd partition, it will return 2
-   *
-   * @return the sequential numerical id of this partition chunk
-   */
-  int getChunkNumber();
-}
diff --git a/api/src/main/java/io/druid/timeline/partition/ShardSpec.java b/api/src/main/java/io/druid/timeline/partition/ShardSpec.java
deleted file mode 100644
index 5461544c609..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/ShardSpec.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.google.common.collect.Range;
-import io.druid.data.input.InputRow;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * A Marker interface that exists to combine ShardSpec objects together for Jackson. Note that this is not an
- * extension API. Extensions are not expected to create new kinds of ShardSpecs.
- */
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-@JsonSubTypes({
-                  @JsonSubTypes.Type(name = "none", value = NoneShardSpec.class),
-              })
-public interface ShardSpec
-{
-  <T> PartitionChunk<T> createChunk(T obj);
-
-  boolean isInChunk(long timestamp, InputRow inputRow);
-
-  int getPartitionNum();
-
-  ShardSpecLookup getLookup(List<ShardSpec> shardSpecs);
-
-  /**
-   * Get the possible range of each dimension for the rows this shard contains.
-   *
-   * @return map of dimensions to its possible range. Dimensions with unknown possible range are not mapped
-   */
-  Map<String, Range<String>> getDomain();
-}
diff --git a/api/src/main/java/io/druid/timeline/partition/ShardSpecLookup.java b/api/src/main/java/io/druid/timeline/partition/ShardSpecLookup.java
deleted file mode 100644
index 721b83af3e7..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/ShardSpecLookup.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-import io.druid.data.input.InputRow;
-
-public interface ShardSpecLookup
-{
-  ShardSpec getShardSpec(long timestamp, InputRow row);
-}
diff --git a/api/src/main/java/io/druid/timeline/partition/SingleElementPartitionChunk.java b/api/src/main/java/io/druid/timeline/partition/SingleElementPartitionChunk.java
deleted file mode 100644
index fd5da6932b4..00000000000
--- a/api/src/main/java/io/druid/timeline/partition/SingleElementPartitionChunk.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline.partition;
-
-/**
- */
-public class SingleElementPartitionChunk<T> implements PartitionChunk<T>
-{
-  private final T element;
-
-  public SingleElementPartitionChunk(T element)
-  {
-    this.element = element;
-  }
-
-  @Override
-  public T getObject()
-  {
-    return element;
-  }
-
-  @Override
-  public boolean abuts(PartitionChunk<T> tPartitionChunk)
-  {
-    return false;
-  }
-
-  @Override
-  public boolean isStart()
-  {
-    return true;
-  }
-
-  @Override
-  public boolean isEnd()
-  {
-    return true;
-  }
-
-  @Override
-  public int getChunkNumber()
-  {
-    return 0;
-  }
-
-  /**
-   * The ordering of PartitionChunks is determined entirely by the partition boundaries and has nothing to do
-   * with the object.  Thus, if there are two SingleElementPartitionChunks, they are equal because they both
-   * represent the full partition space.
-   *
-   * SingleElementPartitionChunks are currently defined as less than every other type of PartitionChunk.  There
-   * is no good reason for it, nor is there a bad reason, that's just the way it is.  This is subject to change.
-   *
-   * @param chunk
-   * @return
-   */
-  @Override
-  public int compareTo(PartitionChunk<T> chunk)
-  {
-    return chunk instanceof SingleElementPartitionChunk ? 0 : -1;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    return element != null ? element.hashCode() : 0;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "SingleElementPartitionChunk{" +
-           "element=" + element +
-           '}';
-  }
-}
diff --git a/api/src/main/java/io/druid/utils/CompressionUtils.java b/api/src/main/java/io/druid/utils/CompressionUtils.java
deleted file mode 100644
index 6a551e319e0..00000000000
--- a/api/src/main/java/io/druid/utils/CompressionUtils.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.utils;
-
-
-import io.druid.guice.annotations.PublicApi;
-import io.druid.java.util.common.logger.Logger;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- */
-@PublicApi
-public class CompressionUtils
-{
-  private static final Logger log = new Logger(CompressionUtils.class);
-
-
-  @Deprecated // Use com.metamx.common.CompressionUtils.zip
-  public static long zip(File directory, File outputZipFile) throws IOException
-  {
-    return io.druid.java.util.common.CompressionUtils.zip(directory, outputZipFile);
-  }
-
-
-  @Deprecated // Use com.metamx.common.CompressionUtils.zip
-  public static long zip(File directory, OutputStream out) throws IOException
-  {
-    return io.druid.java.util.common.CompressionUtils.zip(directory, out);
-  }
-
-  @Deprecated // Use com.metamx.common.CompressionUtils.unzip
-  public static void unzip(File pulledFile, File outDir) throws IOException
-  {
-    io.druid.java.util.common.CompressionUtils.unzip(pulledFile, outDir);
-  }
-
-  @Deprecated // Use com.metamx.common.CompressionUtils.unzip
-  public static void unzip(InputStream in, File outDir) throws IOException
-  {
-    io.druid.java.util.common.CompressionUtils.unzip(in, outDir);
-  }
-
-  /**
-   * Uncompress using a gzip uncompress algorithm from the `pulledFile` to the `outDir`.
-   * Unlike `com.metamx.common.CompressionUtils.gunzip`, this function takes an output *DIRECTORY* and tries to guess the file name.
-   * It is recommended that the caller use `com.metamx.common.CompressionUtils.gunzip` and specify the output file themselves to ensure names are as expected
-   *
-   * @param pulledFile The source file
-   * @param outDir     The destination directory to put the resulting file
-   *
-   * @throws IOException on propogated IO exception, IAE if it cannot determine the proper new name for `pulledFile`
-   */
-  @Deprecated // See description for alternative
-  public static void gunzip(File pulledFile, File outDir) throws IOException
-  {
-    final File outFile = new File(outDir, io.druid.java.util.common.CompressionUtils.getGzBaseName(pulledFile.getName()));
-    io.druid.java.util.common.CompressionUtils.gunzip(pulledFile, outFile);
-    if (!pulledFile.delete()) {
-      log.error("Could not delete tmpFile[%s].", pulledFile);
-    }
-  }
-
-}
diff --git a/api/src/main/java/io/druid/utils/Runnables.java b/api/src/main/java/io/druid/utils/Runnables.java
deleted file mode 100644
index 883aae65b8c..00000000000
--- a/api/src/main/java/io/druid/utils/Runnables.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.utils;
-
-import io.druid.guice.annotations.PublicApi;
-
-/**
- */
-@PublicApi
-public class Runnables
-{
-  public static Runnable getNoopRunnable()
-  {
-    return () -> {};
-  }
-}
diff --git a/api/src/test/java/io/druid/TestObjectMapper.java b/api/src/test/java/io/druid/TestObjectMapper.java
deleted file mode 100644
index 8ce6fedf843..00000000000
--- a/api/src/test/java/io/druid/TestObjectMapper.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid;
-
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.DeserializationFeature;
-import com.fasterxml.jackson.databind.MapperFeature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
-import com.fasterxml.jackson.databind.module.SimpleModule;
-import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
-import io.druid.java.util.common.Intervals;
-import org.joda.time.Interval;
-
-import java.io.IOException;
-
-/**
- */
-public class TestObjectMapper extends ObjectMapper
-{
-  public TestObjectMapper()
-  {
-    configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-    configure(MapperFeature.AUTO_DETECT_GETTERS, false);
-    configure(MapperFeature.AUTO_DETECT_FIELDS, false);
-    configure(MapperFeature.AUTO_DETECT_IS_GETTERS, false);
-    configure(MapperFeature.AUTO_DETECT_SETTERS, false);
-    configure(SerializationFeature.INDENT_OUTPUT, false);
-    registerModule(new TestModule());
-  }
-
-  public static class TestModule extends SimpleModule
-  {
-    TestModule()
-    {
-      addSerializer(Interval.class, ToStringSerializer.instance);
-      addDeserializer(
-          Interval.class, new StdDeserializer<Interval>(Interval.class)
-          {
-            @Override
-            public Interval deserialize(
-                JsonParser jsonParser, DeserializationContext deserializationContext
-            ) throws IOException, JsonProcessingException
-            {
-              return Intervals.of(jsonParser.getText());
-            }
-          }
-      );
-    }
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/MapBasedRowTest.java b/api/src/test/java/io/druid/data/input/MapBasedRowTest.java
deleted file mode 100644
index b6b4d5a9840..00000000000
--- a/api/src/test/java/io/druid/data/input/MapBasedRowTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input;
-
-import com.google.common.collect.ImmutableMap;
-import io.druid.java.util.common.DateTimes;
-import org.junit.Assert;
-import org.junit.Test;
-
-public class MapBasedRowTest
-{
-  @Test
-  public void testGetLongMetricFromString()
-  {
-    MapBasedRow row = new MapBasedRow(
-        DateTimes.nowUtc(),
-        ImmutableMap.<String, Object>builder()
-          .put("k0", "-1.2")
-          .put("k1", "1.23")
-          .put("k2", "1.8")
-          .put("k3", "1e5")
-          .put("k4", "9223372036854775806")
-          .put("k5", "-9223372036854775807")
-          .put("k6", "+9223372036854775802")
-          .build()
-    );
-    
-    Assert.assertEquals(-1.2, row.getMetric("k0"));
-    Assert.assertEquals(1.23, row.getMetric("k1"));
-    Assert.assertEquals(1.8, row.getMetric("k2"));
-    Assert.assertEquals(100000.0, row.getMetric("k3"));
-    Assert.assertEquals(9223372036854775806L, row.getMetric("k4"));
-    Assert.assertEquals(-9223372036854775807L, row.getMetric("k5"));
-    Assert.assertEquals(9223372036854775802L, row.getMetric("k6"));
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java
deleted file mode 100644
index 08e0f7bb869..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.google.common.collect.Lists;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-
-public class CSVParseSpecTest
-{
-  @Test(expected = IllegalArgumentException.class)
-  public void testColumnMissing() throws Exception
-  {
-    @SuppressWarnings("unused") // expected exception
-    final ParseSpec spec = new CSVParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")),
-            Lists.<String>newArrayList(),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        ),
-        ",",
-        Collections.singletonList("a"),
-        false,
-        0
-    );
-  }
-
-  @Test(expected = IllegalArgumentException.class)
-  public void testComma() throws Exception
-  {
-    @SuppressWarnings("unused") // expected exception
-    final ParseSpec spec = new CSVParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Arrays.asList("a,", "b")),
-            Lists.<String>newArrayList(),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        ),
-        ",",
-        Collections.singletonList("a"),
-        false,
-        0
-    );
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java
deleted file mode 100644
index 2ced059721e..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import io.druid.TestObjectMapper;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-
-public class DelimitedParseSpecTest
-{
-  private final ObjectMapper jsonMapper = new TestObjectMapper();
-
-  @Test
-  public void testSerde() throws IOException
-  {
-    DelimitedParseSpec spec = new DelimitedParseSpec(
-        new TimestampSpec("abc", "iso", null),
-        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Collections.singletonList("abc")), null, null),
-        "\u0001",
-        "\u0002",
-        Collections.singletonList("abc"),
-        false,
-        0
-    );
-    final DelimitedParseSpec serde = jsonMapper.readValue(
-        jsonMapper.writeValueAsString(spec),
-        DelimitedParseSpec.class
-    );
-    Assert.assertEquals("abc", serde.getTimestampSpec().getTimestampColumn());
-    Assert.assertEquals("iso", serde.getTimestampSpec().getTimestampFormat());
-
-    Assert.assertEquals(Collections.singletonList("abc"), serde.getColumns());
-    Assert.assertEquals("\u0001", serde.getDelimiter());
-    Assert.assertEquals("\u0002", serde.getListDelimiter());
-    Assert.assertEquals(Collections.singletonList("abc"), serde.getDimensionsSpec().getDimensionNames());
-  }
-
-  @Test(expected = IllegalArgumentException.class)
-  public void testColumnMissing() throws Exception
-  {
-    @SuppressWarnings("unused") // expected exception
-    final ParseSpec spec = new DelimitedParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")),
-            Lists.<String>newArrayList(),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        ),
-        ",",
-        " ",
-        Collections.singletonList("a"),
-        false,
-        0
-    );
-  }
-
-  @Test(expected = IllegalArgumentException.class)
-  public void testComma() throws Exception
-  {
-    @SuppressWarnings("unused") // expected exception
-    final ParseSpec spec = new DelimitedParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Arrays.asList("a,", "b")),
-            Lists.<String>newArrayList(),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        ),
-        ",",
-        null,
-        Collections.singletonList("a"),
-        false,
-        0
-    );
-  }
-
-  @Test(expected = IllegalArgumentException.class)
-  public void testDefaultColumnList()
-  {
-    @SuppressWarnings("unused") // expected exception
-    final DelimitedParseSpec spec = new DelimitedParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")),
-            Lists.<String>newArrayList(),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        ),
-        ",",
-        null,
-        null,
-        false,
-        0
-    );
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/DimensionsSpecSerdeTest.java b/api/src/test/java/io/druid/data/input/impl/DimensionsSpecSerdeTest.java
deleted file mode 100644
index 3bc8f86b776..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/DimensionsSpecSerdeTest.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import junit.framework.Assert;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.List;
-
-/**
- */
-public class DimensionsSpecSerdeTest
-{
-  private final ObjectMapper mapper = new ObjectMapper();
-
-  @Test
-  public void testDimensionsSpecSerde() throws Exception
-  {
-    DimensionsSpec expected = new DimensionsSpec(
-        Arrays.asList(
-            new StringDimensionSchema("AAA"),
-            new StringDimensionSchema("BBB"),
-            new FloatDimensionSchema("C++"),
-            new NewSpatialDimensionSchema("DDT", null),
-            new LongDimensionSchema("EEE"),
-            new NewSpatialDimensionSchema("DDT2", Arrays.asList("A", "B")),
-            new NewSpatialDimensionSchema("IMPR", Arrays.asList("S", "P", "Q", "R"))
-        ),
-        Arrays.asList("FOO", "HAR"),
-        null
-    );
-
-    String jsonStr = "{\"dimensions\":"
-                     + "[\"AAA\", \"BBB\","
-                     + "{\"name\":\"C++\", \"type\":\"float\"},"
-                     + "{\"name\":\"DDT\", \"type\":\"spatial\"},"
-                     + "{\"name\":\"EEE\", \"type\":\"long\"},"
-                     + "{\"name\":\"DDT2\", \"type\": \"spatial\", \"dims\":[\"A\", \"B\"]}],"
-                     + "\"dimensionExclusions\": [\"FOO\", \"HAR\"],"
-                     + "\"spatialDimensions\": [{\"dimName\":\"IMPR\", \"dims\":[\"S\",\"P\",\"Q\",\"R\"]}]"
-                     + "}";
-
-    DimensionsSpec actual = mapper.readValue(
-        mapper.writeValueAsString(
-            mapper.readValue(jsonStr, DimensionsSpec.class)
-        ),
-        DimensionsSpec.class
-    );
-
-    List<SpatialDimensionSchema> expectedSpatials = Arrays.asList(
-        new SpatialDimensionSchema("DDT", null),
-        new SpatialDimensionSchema("DDT2", Arrays.asList("A", "B")),
-        new SpatialDimensionSchema("IMPR", Arrays.asList("S", "P", "Q", "R"))
-    );
-
-    Assert.assertEquals(expected, actual);
-    Assert.assertEquals(expectedSpatials, actual.getSpatialDimensions());
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/FileIteratingFirehoseTest.java b/api/src/test/java/io/druid/data/input/impl/FileIteratingFirehoseTest.java
deleted file mode 100644
index 7335fcbea38..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/FileIteratingFirehoseTest.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.google.common.base.Joiner;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import io.druid.data.input.InputRow;
-import org.apache.commons.io.LineIterator;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.Reader;
-import java.io.StringReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-
-@RunWith(Parameterized.class)
-public class FileIteratingFirehoseTest
-{
-  @Parameters(name = "{0}, {1}")
-  public static Collection<Object[]> constructorFeeder() throws IOException
-  {
-    final List<List<String>> inputTexts = ImmutableList.of(
-        ImmutableList.of("2000,foo"),
-        ImmutableList.of("2000,foo\n2000,bar\n"),
-        ImmutableList.of("2000,foo\n2000,bar\n", "2000,baz"),
-        ImmutableList.of("2000,foo\n2000,bar\n", "", "2000,baz"),
-        ImmutableList.of("2000,foo\n2000,bar\n", "", "2000,baz", ""),
-        ImmutableList.of("2000,foo\n2000,bar\n2000,baz", "", "2000,baz", "2000,foo\n2000,bar\n3000,baz"),
-        ImmutableList.of(""),
-        ImmutableList.of()
-    );
-
-    final List<Object[]> args = new ArrayList<>();
-    for (int numSkipHeadRows = 0; numSkipHeadRows < 3; numSkipHeadRows++) {
-      for (List<String> texts : inputTexts) {
-        args.add(new Object[] {texts, numSkipHeadRows});
-      }
-    }
-
-    return args;
-  }
-
-  private static final char[] LINE_CHARS = "\n".toCharArray();
-
-  private final StringInputRowParser parser;
-  private final List<String> inputs;
-  private final List<String> expectedResults;
-
-  public FileIteratingFirehoseTest(List<String> texts, int numSkipHeaderRows)
-  {
-    parser = new StringInputRowParser(
-        new CSVParseSpec(
-            new TimestampSpec("ts", "auto", null),
-            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("x")), null, null),
-            ",",
-            ImmutableList.of("ts", "x"),
-            false,
-            numSkipHeaderRows
-        ),
-        null
-    );
-
-    this.inputs = texts;
-    this.expectedResults = inputs.stream()
-        .map(input -> input.split("\n"))
-        .flatMap(lines -> {
-          final List<String> filteredLines = Arrays.asList(lines).stream()
-              .filter(line -> line.length() > 0)
-              .map(line -> line.split(",")[1])
-              .collect(Collectors.toList());
-
-          final int numRealSkippedRows = Math.min(filteredLines.size(), numSkipHeaderRows);
-          IntStream.range(0, numRealSkippedRows).forEach(i -> filteredLines.set(i, null));
-          return filteredLines.stream();
-        })
-        .collect(Collectors.toList());
-  }
-
-  @Test
-  public void testFirehose() throws Exception
-  {
-    final List<LineIterator> lineIterators = inputs.stream()
-        .map(s -> new LineIterator(new StringReader(s)))
-        .collect(Collectors.toList());
-
-    try (final FileIteratingFirehose firehose = new FileIteratingFirehose(lineIterators.iterator(), parser)) {
-      final List<String> results = Lists.newArrayList();
-
-      while (firehose.hasMore()) {
-        final InputRow inputRow = firehose.nextRow();
-        if (inputRow == null) {
-          results.add(null);
-        } else {
-          results.add(Joiner.on("|").join(inputRow.getDimension("x")));
-        }
-      }
-
-      Assert.assertEquals(expectedResults, results);
-    }
-  }
-
-  @Test(expected = RuntimeException.class)
-  public void testClose() throws IOException
-  {
-    final LineIterator lineIterator = new LineIterator(new Reader()
-    {
-      @Override
-      public int read(char[] cbuf, int off, int len) throws IOException
-      {
-        System.arraycopy(LINE_CHARS, 0, cbuf, 0, LINE_CHARS.length);
-        return LINE_CHARS.length;
-      }
-
-      @Override
-      public void close() throws IOException
-      {
-        throw new RuntimeException("close test for FileIteratingFirehose");
-      }
-    });
-
-    final TestCloseable closeable = new TestCloseable();
-    final FileIteratingFirehose firehose = new FileIteratingFirehose(
-        ImmutableList.of(lineIterator).iterator(),
-        parser,
-        closeable
-    );
-    firehose.hasMore(); // initialize lineIterator
-    firehose.close();
-    Assert.assertTrue(closeable.closed);
-  }
-
-  private static final class TestCloseable implements Closeable
-  {
-    private boolean closed;
-
-    @Override
-    public void close() throws IOException
-    {
-      closed = true;
-    }
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java b/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java
deleted file mode 100644
index 1a5d59f2f61..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Charsets;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import io.druid.TestObjectMapper;
-import io.druid.data.input.ByteBufferInputRowParser;
-import io.druid.data.input.InputRow;
-import io.druid.java.util.common.DateTimes;
-import io.druid.java.util.common.StringUtils;
-import io.druid.java.util.common.parsers.JSONPathFieldSpec;
-import io.druid.java.util.common.parsers.JSONPathFieldType;
-import io.druid.java.util.common.parsers.JSONPathSpec;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.List;
-
-public class InputRowParserSerdeTest
-{
-  private final ObjectMapper jsonMapper = new TestObjectMapper();
-
-  @Test
-  public void testStringInputRowParserSerde() throws Exception
-  {
-    final StringInputRowParser parser = new StringInputRowParser(
-        new JSONParseSpec(
-            new TimestampSpec("timestamp", "iso", null),
-            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")), null, null),
-            null,
-            null
-        ),
-        null
-    );
-    final ByteBufferInputRowParser parser2 = jsonMapper.readValue(
-        jsonMapper.writeValueAsBytes(parser),
-        ByteBufferInputRowParser.class
-    );
-    final InputRow parsed = parser2.parseBatch(
-        ByteBuffer.wrap(StringUtils.toUtf8("{\"foo\":\"x\",\"bar\":\"y\",\"qux\":\"z\",\"timestamp\":\"2000\"}"))
-    ).get(0);
-    Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
-    Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
-    Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
-    Assert.assertEquals(DateTimes.of("2000").getMillis(), parsed.getTimestampFromEpoch());
-  }
-
-  @Test
-  public void testStringInputRowParserSerdeMultiCharset() throws Exception
-  {
-    Charset[] testCharsets = {
-        Charsets.US_ASCII, Charsets.ISO_8859_1, Charsets.UTF_8,
-        Charsets.UTF_16BE, Charsets.UTF_16LE, Charsets.UTF_16
-    };
-
-    for (Charset testCharset : testCharsets) {
-      InputRow parsed = testCharsetParseHelper(testCharset);
-      Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
-      Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
-      Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
-      Assert.assertEquals(DateTimes.of("3000").getMillis(), parsed.getTimestampFromEpoch());
-    }
-  }
-
-  @Test
-  public void testMapInputRowParserSerde() throws Exception
-  {
-    final MapInputRowParser parser = new MapInputRowParser(
-        new JSONParseSpec(
-            new TimestampSpec("timeposix", "posix", null),
-            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")), ImmutableList.of("baz"), null),
-            null,
-            null
-        )
-    );
-    final MapInputRowParser parser2 = jsonMapper.readValue(
-        jsonMapper.writeValueAsBytes(parser),
-        MapInputRowParser.class
-    );
-    final InputRow parsed = parser2.parseBatch(
-        ImmutableMap.<String, Object>of(
-            "foo", "x",
-            "bar", "y",
-            "qux", "z",
-            "timeposix", "1"
-        )
-    ).get(0);
-    Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
-    Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
-    Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
-    Assert.assertEquals(1000, parsed.getTimestampFromEpoch());
-  }
-
-  @Test
-  public void testMapInputRowParserNumbersSerde() throws Exception
-  {
-    final MapInputRowParser parser = new MapInputRowParser(
-        new JSONParseSpec(
-            new TimestampSpec("timemillis", "millis", null),
-            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "values")), ImmutableList.of("toobig", "value"), null),
-            null,
-            null
-        )
-    );
-    final MapInputRowParser parser2 = jsonMapper.readValue(
-        jsonMapper.writeValueAsBytes(parser),
-        MapInputRowParser.class
-    );
-    final InputRow parsed = parser2.parseBatch(
-        ImmutableMap.<String, Object>of(
-            "timemillis", 1412705931123L,
-            "toobig", 123E64,
-            "value", 123.456,
-            "long", 123456789000L,
-            "values", Lists.newArrayList(1412705931123L, 123.456, 123E45, "hello")
-        )
-    ).get(0);
-    Assert.assertEquals(ImmutableList.of("foo", "values"), parsed.getDimensions());
-    Assert.assertEquals(ImmutableList.of(), parsed.getDimension("foo"));
-    Assert.assertEquals(
-        ImmutableList.of("1412705931123", "123.456", "1.23E47", "hello"),
-        parsed.getDimension("values")
-    );
-    Assert.assertEquals(Float.POSITIVE_INFINITY, parsed.getMetric("toobig").floatValue(), 0.0);
-    Assert.assertEquals(123E64, parsed.getRaw("toobig"));
-    Assert.assertEquals(123.456f, parsed.getMetric("value").floatValue(), 0.0f);
-    Assert.assertEquals(123456789000L, parsed.getRaw("long"));
-    Assert.assertEquals(1.23456791E11f, parsed.getMetric("long").floatValue(), 0.0f);
-    Assert.assertEquals(1412705931123L, parsed.getTimestampFromEpoch());
-  }
-
-  private InputRow testCharsetParseHelper(Charset charset) throws Exception
-  {
-    final StringInputRowParser parser = new StringInputRowParser(
-        new JSONParseSpec(
-            new TimestampSpec("timestamp", "iso", null),
-            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")), null, null),
-            null,
-            null
-        ),
-        charset.name()
-    );
-
-    final ByteBufferInputRowParser parser2 = jsonMapper.readValue(
-        jsonMapper.writeValueAsBytes(parser),
-        ByteBufferInputRowParser.class
-    );
-
-    final InputRow parsed = parser2.parseBatch(
-        ByteBuffer.wrap(
-            "{\"foo\":\"x\",\"bar\":\"y\",\"qux\":\"z\",\"timestamp\":\"3000\"}".getBytes(charset)
-        )
-    ).get(0);
-
-    return parsed;
-  }
-
-  @Test
-  public void testFlattenParse() throws Exception
-  {
-    List<JSONPathFieldSpec> fields = new ArrayList<>();
-    fields.add(JSONPathFieldSpec.createNestedField("foobar1", "$.foo.bar1"));
-    fields.add(JSONPathFieldSpec.createNestedField("foobar2", "$.foo.bar2"));
-    fields.add(JSONPathFieldSpec.createNestedField("baz0", "$.baz[0]"));
-    fields.add(JSONPathFieldSpec.createNestedField("baz1", "$.baz[1]"));
-    fields.add(JSONPathFieldSpec.createNestedField("baz2", "$.baz[2]"));
-    fields.add(JSONPathFieldSpec.createNestedField("hey0barx", "$.hey[0].barx"));
-    fields.add(JSONPathFieldSpec.createNestedField("metA", "$.met.a"));
-    fields.add(JSONPathFieldSpec.createRootField("timestamp"));
-    fields.add(JSONPathFieldSpec.createRootField("foo.bar1"));
-
-    JSONPathSpec flattenSpec = new JSONPathSpec(true, fields);
-    final StringInputRowParser parser = new StringInputRowParser(
-        new JSONParseSpec(
-            new TimestampSpec("timestamp", "iso", null),
-            new DimensionsSpec(null, null, null),
-            flattenSpec,
-            null
-        ),
-        null
-    );
-
-    final StringInputRowParser parser2 = jsonMapper.readValue(
-        jsonMapper.writeValueAsBytes(parser),
-        StringInputRowParser.class
-    );
-
-    final InputRow parsed = parser2.parse(
-        "{\"blah\":[4,5,6], \"newmet\":5, \"foo\":{\"bar1\":\"aaa\", \"bar2\":\"bbb\"}, \"baz\":[1,2,3], \"timestamp\":\"2999\", \"foo.bar1\":\"Hello world!\", \"hey\":[{\"barx\":\"asdf\"}], \"met\":{\"a\":456}}"
-    );
-    Assert.assertEquals(ImmutableList.of("foobar1", "foobar2", "baz0", "baz1", "baz2", "hey0barx", "metA", "timestamp", "foo.bar1", "blah", "newmet", "baz"), parsed.getDimensions());
-    Assert.assertEquals(ImmutableList.of("aaa"), parsed.getDimension("foobar1"));
-    Assert.assertEquals(ImmutableList.of("bbb"), parsed.getDimension("foobar2"));
-    Assert.assertEquals(ImmutableList.of("1"), parsed.getDimension("baz0"));
-    Assert.assertEquals(ImmutableList.of("2"), parsed.getDimension("baz1"));
-    Assert.assertEquals(ImmutableList.of("3"), parsed.getDimension("baz2"));
-    Assert.assertEquals(ImmutableList.of("Hello world!"), parsed.getDimension("foo.bar1"));
-    Assert.assertEquals(ImmutableList.of("asdf"), parsed.getDimension("hey0barx"));
-    Assert.assertEquals(ImmutableList.of("456"), parsed.getDimension("metA"));
-    Assert.assertEquals(ImmutableList.of("5"), parsed.getDimension("newmet"));
-    Assert.assertEquals(DateTimes.of("2999").getMillis(), parsed.getTimestampFromEpoch());
-
-    String testSpec = "{\"enabled\": true,\"useFieldDiscovery\": true, \"fields\": [\"parseThisRootField\"]}";
-    final JSONPathSpec parsedSpec = jsonMapper.readValue(testSpec, JSONPathSpec.class);
-    List<JSONPathFieldSpec> fieldSpecs = parsedSpec.getFields();
-    Assert.assertEquals(JSONPathFieldType.ROOT, fieldSpecs.get(0).getType());
-    Assert.assertEquals("parseThisRootField", fieldSpecs.get(0).getName());
-    Assert.assertEquals("parseThisRootField", fieldSpecs.get(0).getExpr());
-  }
-
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java
deleted file mode 100644
index 9d6b6180b40..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.google.common.collect.Lists;
-import io.druid.java.util.common.parsers.Parser;
-import junit.framework.Assert;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.Map;
-
-public class JSONLowercaseParseSpecTest
-{
-  @Test
-  public void testLowercasing() throws Exception
-  {
-    JSONLowercaseParseSpec spec = new JSONLowercaseParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Arrays.asList("A", "B")),
-            Lists.<String>newArrayList(),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        )
-    );
-    Parser parser = spec.makeParser();
-    Map<String, Object> event = parser.parseToMap("{\"timestamp\":\"2015-01-01\",\"A\":\"foo\"}");
-    Assert.assertEquals("foo", event.get("a"));
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java
deleted file mode 100644
index de2814eda9a..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableList;
-import io.druid.TestObjectMapper;
-import io.druid.java.util.common.parsers.JSONPathFieldSpec;
-import io.druid.java.util.common.parsers.JSONPathFieldType;
-import io.druid.java.util.common.parsers.JSONPathSpec;
-import io.druid.java.util.common.parsers.Parser;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-public class JSONParseSpecTest
-{
-  private final ObjectMapper jsonMapper = new TestObjectMapper();
-
-  @Test
-  public void testParseRow()
-  {
-    final JSONParseSpec parseSpec = new JSONParseSpec(
-        new TimestampSpec("timestamp", "iso", null),
-        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("bar", "foo")), null, null),
-        new JSONPathSpec(
-            true,
-            ImmutableList.of(
-                new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz", "baz"),
-                new JSONPathFieldSpec(JSONPathFieldType.ROOT, "root_baz2", "baz2"),
-                new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg", "$.o.mg"),
-                new JSONPathFieldSpec(JSONPathFieldType.PATH, "path_omg2", "$.o.mg2"),
-                new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg", ".o.mg"),
-                new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq_omg2", ".o.mg2")
-            )
-        ),
-        null
-    );
-
-    final Map<String, Object> expected = new HashMap<>();
-    expected.put("foo", "x");
-    expected.put("baz", 4L);
-    expected.put("root_baz", 4L);
-    expected.put("root_baz2", null);
-    expected.put("path_omg", 1L);
-    expected.put("path_omg2", null);
-    expected.put("jq_omg", 1L);
-    expected.put("jq_omg2", null);
-
-    final Parser<String, Object> parser = parseSpec.makeParser();
-    final Map<String, Object> parsedRow = parser.parseToMap("{\"bar\":null,\"foo\":\"x\",\"baz\":4,\"o\":{\"mg\":1}}");
-    Assert.assertNotNull(parsedRow);
-    Assert.assertEquals(expected, parsedRow);
-    Assert.assertNull(parsedRow.get("bar"));
-    Assert.assertNull(parsedRow.get("buzz"));
-    Assert.assertNull(parsedRow.get("root_baz2"));
-    Assert.assertNull(parsedRow.get("jq_omg2"));
-    Assert.assertNull(parsedRow.get("path_omg2"));
-  }
-
-  @Test
-  public void testSerde() throws IOException
-  {
-    HashMap<String, Boolean> feature = new HashMap<String, Boolean>();
-    feature.put("ALLOW_UNQUOTED_CONTROL_CHARS", true);
-    JSONParseSpec spec = new JSONParseSpec(
-        new TimestampSpec("timestamp", "iso", null),
-        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("bar", "foo")), null, null),
-        null,
-        feature
-    );
-
-    final JSONParseSpec serde = jsonMapper.readValue(
-        jsonMapper.writeValueAsString(spec),
-        JSONParseSpec.class
-    );
-    Assert.assertEquals("timestamp", serde.getTimestampSpec().getTimestampColumn());
-    Assert.assertEquals("iso", serde.getTimestampSpec().getTimestampFormat());
-
-    Assert.assertEquals(Arrays.asList("bar", "foo"), serde.getDimensionsSpec().getDimensionNames());
-    Assert.assertEquals(feature, serde.getFeatureSpec());
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/JSONPathSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JSONPathSpecTest.java
deleted file mode 100644
index 3ef2714ef2a..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/JSONPathSpecTest.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import io.druid.TestObjectMapper;
-import io.druid.java.util.common.parsers.JSONPathFieldSpec;
-import io.druid.java.util.common.parsers.JSONPathFieldType;
-import io.druid.java.util.common.parsers.JSONPathSpec;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-public class JSONPathSpecTest
-{
-  private final ObjectMapper jsonMapper = new TestObjectMapper();
-
-  @Test
-  public void testSerde() throws IOException
-  {
-    List<JSONPathFieldSpec> fields = new ArrayList<>();
-    fields.add(JSONPathFieldSpec.createNestedField("foobar1", "$.foo.bar1"));
-    fields.add(JSONPathFieldSpec.createNestedField("baz0", "$.baz[0]"));
-    fields.add(JSONPathFieldSpec.createNestedField("hey0barx", "$.hey[0].barx"));
-    fields.add(JSONPathFieldSpec.createRootField("timestamp"));
-    fields.add(JSONPathFieldSpec.createRootField("foo.bar1"));
-    fields.add(JSONPathFieldSpec.createJqField("foobar1", ".foo.bar1"));
-    fields.add(JSONPathFieldSpec.createJqField("baz0", ".baz[0]"));
-    fields.add(JSONPathFieldSpec.createJqField("hey0barx", ".hey[0].barx"));
-
-    JSONPathSpec flattenSpec = new JSONPathSpec(true, fields);
-
-    final JSONPathSpec serde = jsonMapper.readValue(
-        jsonMapper.writeValueAsString(flattenSpec),
-        JSONPathSpec.class
-    );
-    Assert.assertTrue(serde.isUseFieldDiscovery());
-    List<JSONPathFieldSpec> serdeFields = serde.getFields();
-    JSONPathFieldSpec foobar1 = serdeFields.get(0);
-    JSONPathFieldSpec baz0 = serdeFields.get(1);
-    JSONPathFieldSpec hey0barx = serdeFields.get(2);
-    JSONPathFieldSpec timestamp = serdeFields.get(3);
-    JSONPathFieldSpec foodotbar1 = serdeFields.get(4);
-    JSONPathFieldSpec jqFoobar1 = serdeFields.get(5);
-    JSONPathFieldSpec jqBaz0 = serdeFields.get(6);
-    JSONPathFieldSpec jqHey0barx = serdeFields.get(7);
-
-    Assert.assertEquals(JSONPathFieldType.PATH, foobar1.getType());
-    Assert.assertEquals("foobar1", foobar1.getName());
-    Assert.assertEquals("$.foo.bar1", foobar1.getExpr());
-
-    Assert.assertEquals(JSONPathFieldType.PATH, baz0.getType());
-    Assert.assertEquals("baz0", baz0.getName());
-    Assert.assertEquals("$.baz[0]", baz0.getExpr());
-
-    Assert.assertEquals(JSONPathFieldType.PATH, hey0barx.getType());
-    Assert.assertEquals("hey0barx", hey0barx.getName());
-    Assert.assertEquals("$.hey[0].barx", hey0barx.getExpr());
-
-    Assert.assertEquals(JSONPathFieldType.JQ, jqFoobar1.getType());
-    Assert.assertEquals("foobar1", jqFoobar1.getName());
-    Assert.assertEquals(".foo.bar1", jqFoobar1.getExpr());
-
-    Assert.assertEquals(JSONPathFieldType.JQ, jqBaz0.getType());
-    Assert.assertEquals("baz0", jqBaz0.getName());
-    Assert.assertEquals(".baz[0]", jqBaz0.getExpr());
-
-    Assert.assertEquals(JSONPathFieldType.JQ, jqHey0barx.getType());
-    Assert.assertEquals("hey0barx", jqHey0barx.getName());
-    Assert.assertEquals(".hey[0].barx", jqHey0barx.getExpr());
-
-    Assert.assertEquals(JSONPathFieldType.ROOT, timestamp.getType());
-    Assert.assertEquals("timestamp", timestamp.getName());
-    Assert.assertEquals("timestamp", timestamp.getExpr());
-
-    Assert.assertEquals(JSONPathFieldType.ROOT, foodotbar1.getType());
-    Assert.assertEquals("foo.bar1", foodotbar1.getName());
-    Assert.assertEquals("foo.bar1", foodotbar1.getExpr());
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/JavaScriptParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JavaScriptParseSpecTest.java
deleted file mode 100644
index 805e019b1b0..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/JavaScriptParseSpecTest.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.InjectableValues;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableMap;
-import io.druid.TestObjectMapper;
-import io.druid.java.util.common.parsers.Parser;
-import io.druid.js.JavaScriptConfig;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.Map;
-
-/**
- */
-public class JavaScriptParseSpecTest
-{
-  private final ObjectMapper jsonMapper = new TestObjectMapper();
-
-  @Rule
-  public ExpectedException expectedException = ExpectedException.none();
-
-  @Test
-  public void testSerde() throws IOException
-  {
-    jsonMapper.setInjectableValues(
-        new InjectableValues.Std().addValue(
-            JavaScriptConfig.class,
-            JavaScriptConfig.getEnabledInstance()
-        )
-    );
-    JavaScriptParseSpec spec = new JavaScriptParseSpec(
-        new TimestampSpec("abc", "iso", null),
-        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Collections.singletonList("abc")), null, null),
-        "abc",
-        JavaScriptConfig.getEnabledInstance()
-    );
-    final JavaScriptParseSpec serde = jsonMapper.readValue(
-        jsonMapper.writeValueAsString(spec),
-        JavaScriptParseSpec.class
-    );
-    Assert.assertEquals("abc", serde.getTimestampSpec().getTimestampColumn());
-    Assert.assertEquals("iso", serde.getTimestampSpec().getTimestampFormat());
-
-    Assert.assertEquals("abc", serde.getFunction());
-    Assert.assertEquals(Collections.singletonList("abc"), serde.getDimensionsSpec().getDimensionNames());
-  }
-
-  @Test
-  public void testMakeParser()
-  {
-    final JavaScriptConfig config = JavaScriptConfig.getEnabledInstance();
-    JavaScriptParseSpec spec = new JavaScriptParseSpec(
-        new TimestampSpec("abc", "iso", null),
-        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Collections.singletonList("abc")), null, null),
-        "function(str) { var parts = str.split(\"-\"); return { one: parts[0], two: parts[1] } }",
-        config
-    );
-
-    final Parser<String, Object> parser = spec.makeParser();
-    final Map<String, Object> obj = parser.parseToMap("x-y");
-    Assert.assertEquals(ImmutableMap.of("one", "x", "two", "y"), obj);
-  }
-
-  @Test
-  public void testMakeParserNotAllowed()
-  {
-    final JavaScriptConfig config = new JavaScriptConfig(false);
-    JavaScriptParseSpec spec = new JavaScriptParseSpec(
-        new TimestampSpec("abc", "iso", null),
-        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Collections.singletonList("abc")), null, null),
-        "abc",
-        config
-    );
-
-    expectedException.expect(IllegalStateException.class);
-    expectedException.expectMessage("JavaScript is disabled");
-    spec.makeParser();
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/NoopInputRowParserTest.java b/api/src/test/java/io/druid/data/input/impl/NoopInputRowParserTest.java
deleted file mode 100644
index 7e3be694741..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/NoopInputRowParserTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableList;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- */
-public class NoopInputRowParserTest
-{
-  private final ObjectMapper mapper = new ObjectMapper();
-
-  @Test
-  public void testSerdeWithNullParseSpec() throws Exception
-  {
-    String jsonStr = "{ \"type\":\"noop\" }";
-
-    InputRowParser actual = mapper.readValue(
-        mapper.writeValueAsString(
-            mapper.readValue(jsonStr, InputRowParser.class)
-        ),
-        InputRowParser.class
-    );
-
-    Assert.assertEquals(new NoopInputRowParser(null), actual);
-  }
-
-  @Test
-  public void testSerdeWithNonNullParseSpec() throws Exception
-  {
-    String jsonStr = "{"
-                     + "\"type\":\"noop\","
-                     + "\"parseSpec\":{ \"format\":\"timeAndDims\", \"dimensionsSpec\": { \"dimensions\": [\"host\"] } }"
-                     + "}";
-
-    InputRowParser actual = mapper.readValue(
-        mapper.writeValueAsString(
-            mapper.readValue(jsonStr, InputRowParser.class)
-        ),
-        InputRowParser.class
-    );
-
-    Assert.assertEquals(
-        new NoopInputRowParser(
-            new TimeAndDimsParseSpec(
-                null,
-                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null)
-            )
-        ),
-        actual
-    );
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java
deleted file mode 100644
index ffb07dc80e9..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.google.common.collect.Lists;
-import io.druid.java.util.common.parsers.ParseException;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-
-public class ParseSpecTest
-{
-  @Test(expected = ParseException.class)
-  public void testDuplicateNames() throws Exception
-  {
-    @SuppressWarnings("unused") // expected exception
-    final ParseSpec spec = new DelimitedParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b", "a")),
-            Lists.<String>newArrayList(),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        ),
-        ",",
-        " ",
-        Arrays.asList("a", "b"),
-        false,
-        0
-    );
-  }
-
-  @Test(expected = IllegalArgumentException.class)
-  public void testDimAndDimExcluOverlap() throws Exception
-  {
-    @SuppressWarnings("unused") // expected exception
-    final ParseSpec spec = new DelimitedParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "B")),
-            Lists.newArrayList("B"),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        ),
-        ",",
-        null,
-        Arrays.asList("a", "B"),
-        false,
-        0
-    );
-  }
-
-  @Test
-  public void testDimExclusionDuplicate() throws Exception
-  {
-    @SuppressWarnings("unused") // expected exception
-    final ParseSpec spec = new DelimitedParseSpec(
-        new TimestampSpec(
-            "timestamp",
-            "auto",
-            null
-        ),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(Collections.singletonList("a")),
-            Lists.newArrayList("B", "B"),
-            Lists.<SpatialDimensionSchema>newArrayList()
-        ),
-        ",",
-        null,
-        Arrays.asList("a", "B"),
-        false,
-        0
-    );
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/RegexParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/RegexParseSpecTest.java
deleted file mode 100644
index 68930ea6269..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/RegexParseSpecTest.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import io.druid.TestObjectMapper;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.Collections;
-
-/**
- */
-public class RegexParseSpecTest
-{
-  private final ObjectMapper jsonMapper = new TestObjectMapper();
-
-  @Test
-  public void testSerde() throws IOException
-  {
-    RegexParseSpec spec = new RegexParseSpec(
-        new TimestampSpec("abc", "iso", null),
-        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Collections.singletonList("abc")), null, null),
-        "\u0001",
-        Collections.singletonList("abc"),
-        "abc"
-    );
-    final RegexParseSpec serde = jsonMapper.readValue(
-        jsonMapper.writeValueAsString(spec),
-        RegexParseSpec.class
-    );
-    Assert.assertEquals("abc", serde.getTimestampSpec().getTimestampColumn());
-    Assert.assertEquals("iso", serde.getTimestampSpec().getTimestampFormat());
-
-    Assert.assertEquals("abc", serde.getPattern());
-    Assert.assertEquals("\u0001", serde.getListDelimiter());
-    Assert.assertEquals(Collections.singletonList("abc"), serde.getDimensionsSpec().getDimensionNames());
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/StringInputRowParserTest.java b/api/src/test/java/io/druid/data/input/impl/StringInputRowParserTest.java
deleted file mode 100644
index d532ba5fd40..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/StringInputRowParserTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.google.common.collect.ImmutableList;
-import io.druid.js.JavaScriptConfig;
-import org.hamcrest.CoreMatchers;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-public class StringInputRowParserTest
-{
-  @Rule
-  public ExpectedException expectedException = ExpectedException.none();
-
-  @Test
-  public void testDisableJavaScript()
-  {
-    final JavaScriptParseSpec parseSpec = new JavaScriptParseSpec(
-        new TimestampSpec("timestamp", "auto", null),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(
-                ImmutableList.of(
-                    "dim1",
-                    "dim2"
-                )
-            ),
-            null,
-            null
-        ),
-        "func",
-        new JavaScriptConfig(false)
-    );
-    final StringInputRowParser parser = new StringInputRowParser(parseSpec, "UTF-8");
-
-    expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class));
-    expectedException.expectMessage("JavaScript is disabled");
-
-    parser.startFileFromBeginning();
-  }
-
-  @Test
-  public void testDisableJavaScript2()
-  {
-    final JavaScriptParseSpec parseSpec = new JavaScriptParseSpec(
-        new TimestampSpec("timestamp", "auto", null),
-        new DimensionsSpec(
-            DimensionsSpec.getDefaultSchemas(
-                ImmutableList.of(
-                    "dim1",
-                    "dim2"
-                )
-            ),
-            null,
-            null
-        ),
-        "func",
-        new JavaScriptConfig(false)
-    );
-    final StringInputRowParser parser = new StringInputRowParser(parseSpec, "UTF-8");
-
-    expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class));
-    expectedException.expectMessage("JavaScript is disabled");
-
-    parser.parse("");
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/TimeAndDimsParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/TimeAndDimsParseSpecTest.java
deleted file mode 100644
index 8f36bacf490..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/TimeAndDimsParseSpecTest.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableList;
-import junit.framework.Assert;
-import org.junit.Test;
-
-/**
- */
-public class TimeAndDimsParseSpecTest
-{
-  private final ObjectMapper mapper = new ObjectMapper();
-
-  @Test
-  public void testSerdeWithNulls() throws Exception
-  {
-    String jsonStr = "{ \"format\":\"timeAndDims\" }";
-
-    ParseSpec actual = mapper.readValue(
-        mapper.writeValueAsString(
-            mapper.readValue(jsonStr, ParseSpec.class)
-        ),
-        ParseSpec.class
-    );
-
-    Assert.assertEquals(new TimeAndDimsParseSpec(null, null), actual);
-  }
-
-  @Test
-  public void testSerdeWithNonNulls() throws Exception
-  {
-    String jsonStr = "{"
-                     + "\"format\":\"timeAndDims\","
-                     + "\"timestampSpec\": { \"column\": \"tcol\" },"
-                     + "\"dimensionsSpec\": { \"dimensions\": [\"host\"] }"
-                     + "}";
-
-    ParseSpec actual = mapper.readValue(
-        mapper.writeValueAsString(
-            mapper.readValue(jsonStr, ParseSpec.class)
-        ),
-        ParseSpec.class
-    );
-
-    Assert.assertEquals(
-        new TimeAndDimsParseSpec(
-            new TimestampSpec("tcol", null, null),
-            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null)
-        ),
-        actual
-    );
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java
deleted file mode 100644
index 9aca31c4516..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl;
-
-import com.google.common.collect.ImmutableMap;
-import io.druid.java.util.common.DateTimes;
-import org.joda.time.DateTime;
-import org.joda.time.format.ISODateTimeFormat;
-import org.junit.Assert;
-import org.junit.Test;
-
-public class TimestampSpecTest
-{
-  @Test
-  public void testExtractTimestamp() throws Exception
-  {
-    TimestampSpec spec = new TimestampSpec("TIMEstamp", "yyyy-MM-dd", null);
-    Assert.assertEquals(
-        DateTimes.of("2014-03-01"),
-        spec.extractTimestamp(ImmutableMap.<String, Object>of("TIMEstamp", "2014-03-01"))
-    );
-  }
-
-  @Test
-  public void testExtractTimestampWithMissingTimestampColumn() throws Exception
-  {
-    TimestampSpec spec = new TimestampSpec(null, null, DateTimes.EPOCH);
-    Assert.assertEquals(
-        DateTimes.of("1970-01-01"),
-        spec.extractTimestamp(ImmutableMap.<String, Object>of("dim", "foo"))
-    );
-  }
-
-  @Test
-  public void testContextualTimestampList() throws Exception
-  {
-    String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss";
-    String[] dates = new String[]{
-        "2000-01-01T05:00:00",
-        "2000-01-01T05:00:01",
-        "2000-01-01T05:00:01",
-        "2000-01-01T05:00:02",
-        "2000-01-01T05:00:03",
-        };
-    TimestampSpec spec = new TimestampSpec("TIMEstamp", DATE_FORMAT, null);
-
-    DateTimes.UtcFormatter formatter = DateTimes.wrapFormatter(ISODateTimeFormat.dateHourMinuteSecond());
-
-    for (String date : dates) {
-      DateTime dateTime = spec.extractTimestamp(ImmutableMap.<String, Object>of("TIMEstamp", date));
-      DateTime expectedDateTime = formatter.parse(date);
-      Assert.assertEquals(expectedDateTime, dateTime);
-    }
-  }
-}
diff --git a/api/src/test/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java b/api/src/test/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java
deleted file mode 100644
index ddec6a27c4e..00000000000
--- a/api/src/test/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java
+++ /dev/null
@@ -1,498 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.data.input.impl.prefetch;
-
-import com.google.common.base.Charsets;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.io.CountingOutputStream;
-import io.druid.data.input.Firehose;
-import io.druid.data.input.Row;
-import io.druid.data.input.impl.CSVParseSpec;
-import io.druid.data.input.impl.DimensionsSpec;
-import io.druid.data.input.impl.StringInputRowParser;
-import io.druid.data.input.impl.TimestampSpec;
-import io.druid.java.util.common.DateTimes;
-import io.druid.java.util.common.StringUtils;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.filefilter.TrueFileFilter;
-import org.hamcrest.CoreMatchers;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeoutException;
-
-public class PrefetchableTextFilesFirehoseFactoryTest
-{
-  private static final List<File> FIREHOSE_TMP_DIRS = new ArrayList<>();
-  private static File TEST_DIR;
-  private static long FILE_SIZE = -1;
-
-  private static final StringInputRowParser parser = new StringInputRowParser(
-      new CSVParseSpec(
-          new TimestampSpec(
-              "timestamp",
-              "auto",
-              null
-          ),
-          new DimensionsSpec(
-              DimensionsSpec.getDefaultSchemas(Arrays.asList("timestamp", "a", "b")),
-              Lists.newArrayList(),
-              Lists.newArrayList()
-          ),
-          ",",
-          Arrays.asList("timestamp", "a", "b"),
-          false,
-          0
-      ),
-      Charsets.UTF_8.name()
-  );
-
-  @Rule
-  public ExpectedException expectedException = ExpectedException.none();
-
-  @BeforeClass
-  public static void setup() throws IOException
-  {
-    TEST_DIR = File.createTempFile(PrefetchableTextFilesFirehoseFactoryTest.class.getSimpleName(), "testDir");
-    FileUtils.forceDelete(TEST_DIR);
-    FileUtils.forceMkdir(TEST_DIR);
-
-    for (int i = 0; i < 100; i++) {
-      try (
-          CountingOutputStream cos = new CountingOutputStream(
-              Files.newOutputStream(new File(TEST_DIR, "test_" + i).toPath())
-          );
-          Writer writer = new BufferedWriter(new OutputStreamWriter(cos, StandardCharsets.UTF_8))
-      ) {
-        for (int j = 0; j < 100; j++) {
-          final String a = StringUtils.format("%d,%03d,%03d\n", (20171220 + i), i, j);
-          writer.write(a);
-        }
-        writer.flush();
-        // Every file size must be same
-        if (FILE_SIZE == -1) {
-          FILE_SIZE = cos.getCount();
-        } else {
-          Assert.assertEquals(FILE_SIZE, cos.getCount());
-        }
-      }
-    }
-  }
-
-  @AfterClass
-  public static void teardown() throws IOException
-  {
-    FileUtils.forceDelete(TEST_DIR);
-    for (File dir : FIREHOSE_TMP_DIRS) {
-      FileUtils.forceDelete(dir);
-    }
-  }
-
-  private static void assertResult(List<Row> rows)
-  {
-    Assert.assertEquals(10000, rows.size());
-    rows.sort((r1, r2) -> {
-      int c = r1.getTimestamp().compareTo(r2.getTimestamp());
-      if (c != 0) {
-        return c;
-      }
-      c = Integer.valueOf(r1.getDimension("a").get(0)).compareTo(Integer.valueOf(r2.getDimension("a").get(0)));
-      if (c != 0) {
-        return c;
-      }
-
-      return Integer.valueOf(r1.getDimension("b").get(0)).compareTo(Integer.valueOf(r2.getDimension("b").get(0)));
-    });
-
-    for (int i = 0; i < 100; i++) {
-      for (int j = 0; j < 100; j++) {
-        final Row row = rows.get(i * 100 + j);
-        Assert.assertEquals(DateTimes.utc(20171220 + i), row.getTimestamp());
-        Assert.assertEquals(i, Integer.valueOf(row.getDimension("a").get(0)).intValue());
-        Assert.assertEquals(j, Integer.valueOf(row.getDimension("b").get(0)).intValue());
-      }
-    }
-  }
-
-  private static void assertNumRemainingCacheFiles(File firehoseTmpDir, int expectedNumFiles)
-  {
-    final String[] files = firehoseTmpDir.list();
-    Assert.assertNotNull(files);
-    Assert.assertEquals(expectedNumFiles, files.length);
-  }
-
-  private static File createFirehoseTmpDir(String dirSuffix) throws IOException
-  {
-    final File firehoseTempDir = File.createTempFile(
-        PrefetchableTextFilesFirehoseFactoryTest.class.getSimpleName(),
-        dirSuffix
-    );
-    FileUtils.forceDelete(firehoseTempDir);
-    FileUtils.forceMkdir(firehoseTempDir);
-    FIREHOSE_TMP_DIRS.add(firehoseTempDir);
-    return firehoseTempDir;
-  }
-
-  @Test
-  public void testWithoutCacheAndFetch() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 0, 0);
-
-    final List<Row> rows = new ArrayList<>();
-    final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-      while (firehose.hasMore()) {
-        rows.add(firehose.nextRow());
-      }
-    }
-
-    Assert.assertEquals(0, factory.getCacheManager().getTotalCachedBytes());
-    assertResult(rows);
-    assertNumRemainingCacheFiles(firehoseTmpDir, 0);
-  }
-
-  @Test
-  public void testWithoutCache() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 0, 2048);
-
-    final List<Row> rows = new ArrayList<>();
-    final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCache");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-      while (firehose.hasMore()) {
-        rows.add(firehose.nextRow());
-      }
-    }
-
-    Assert.assertEquals(0, factory.getCacheManager().getTotalCachedBytes());
-    assertResult(rows);
-    assertNumRemainingCacheFiles(firehoseTmpDir, 0);
-  }
-
-  @Test
-  public void testWithZeroFetchCapacity() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 0);
-
-    final List<Row> rows = new ArrayList<>();
-    final File firehoseTmpDir = createFirehoseTmpDir("testWithZeroFetchCapacity");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-      while (firehose.hasMore()) {
-        rows.add(firehose.nextRow());
-      }
-    }
-
-    assertResult(rows);
-    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
-  }
-
-  @Test
-  public void testWithCacheAndFetch() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.of(TEST_DIR);
-
-    final List<Row> rows = new ArrayList<>();
-    final File firehoseTmpDir = createFirehoseTmpDir("testWithCacheAndFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-      while (firehose.hasMore()) {
-        rows.add(firehose.nextRow());
-      }
-    }
-
-    assertResult(rows);
-    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
-  }
-
-  @Test
-  public void testWithLargeCacheAndSmallFetch() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 1024);
-
-    final List<Row> rows = new ArrayList<>();
-    final File firehoseTmpDir = createFirehoseTmpDir("testWithLargeCacheAndSmallFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-      while (firehose.hasMore()) {
-        rows.add(firehose.nextRow());
-      }
-    }
-
-    assertResult(rows);
-    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
-  }
-
-  @Test
-  public void testWithSmallCacheAndLargeFetch() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 1024, 2048);
-
-    final List<Row> rows = new ArrayList<>();
-    final File firehoseTmpDir = createFirehoseTmpDir("testWithSmallCacheAndLargeFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-      while (firehose.hasMore()) {
-        rows.add(firehose.nextRow());
-      }
-    }
-
-    assertResult(rows);
-    assertNumRemainingCacheFiles(firehoseTmpDir, 1);
-  }
-
-  @Test
-  public void testRetry() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 1);
-
-    final List<Row> rows = new ArrayList<>();
-    final File firehoseTmpDir = createFirehoseTmpDir("testRetry");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-      while (firehose.hasMore()) {
-        rows.add(firehose.nextRow());
-      }
-    }
-
-    assertResult(rows);
-    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
-  }
-
-  @Test
-  public void testMaxRetry() throws IOException
-  {
-    expectedException.expect(RuntimeException.class);
-    expectedException.expectCause(CoreMatchers.instanceOf(ExecutionException.class));
-    expectedException.expectMessage("Exception for retry test");
-
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 5);
-
-    try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testMaxRetry"))) {
-      while (firehose.hasMore()) {
-        firehose.nextRow();
-      }
-    }
-  }
-
-  @Test
-  public void testTimeout() throws IOException
-  {
-    expectedException.expect(RuntimeException.class);
-    expectedException.expectCause(CoreMatchers.instanceOf(TimeoutException.class));
-
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.withSleepMillis(TEST_DIR, 1000);
-
-    try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testTimeout"))) {
-      while (firehose.hasMore()) {
-        firehose.nextRow();
-      }
-    }
-  }
-
-  @Test
-  public void testReconnectWithCacheAndPrefetch() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.of(TEST_DIR);
-    final File firehoseTmpDir = createFirehoseTmpDir("testReconnectWithCacheAndPrefetch");
-
-    for (int i = 0; i < 5; i++) {
-      final List<Row> rows = new ArrayList<>();
-      try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-        if (i > 0) {
-          Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
-        }
-
-        while (firehose.hasMore()) {
-          rows.add(firehose.nextRow());
-        }
-      }
-      assertResult(rows);
-      assertNumRemainingCacheFiles(firehoseTmpDir, 2);
-    }
-  }
-
-  @Test
-  public void testReconnectWithCache() throws IOException
-  {
-    final TestPrefetchableTextFilesFirehoseFactory factory =
-        TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 0);
-    final File firehoseTmpDir = createFirehoseTmpDir("testReconnectWithCache");
-
-    for (int i = 0; i < 5; i++) {
-      final List<Row> rows = new ArrayList<>();
-      try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
-        if (i > 0) {
-          Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
-        }
-
-        while (firehose.hasMore()) {
-          rows.add(firehose.nextRow());
-        }
-      }
-      assertResult(rows);
-      assertNumRemainingCacheFiles(firehoseTmpDir, 2);
-    }
-  }
-
-  static class TestPrefetchableTextFilesFirehoseFactory extends PrefetchableTextFilesFirehoseFactory<File>
-  {
-    private static final long defaultTimeout = 1000;
-    private final long sleepMillis;
-    private final File baseDir;
-    private int openExceptionCount;
-
-    static TestPrefetchableTextFilesFirehoseFactory with(File baseDir, long cacheCapacity, long fetchCapacity)
-    {
-      return new TestPrefetchableTextFilesFirehoseFactory(
-          baseDir,
-          1024,
-          cacheCapacity,
-          fetchCapacity,
-          defaultTimeout,
-          3,
-          0,
-          0
-      );
-    }
-
-    static TestPrefetchableTextFilesFirehoseFactory of(File baseDir)
-    {
-      return new TestPrefetchableTextFilesFirehoseFactory(
-          baseDir,
-          1024,
-          2048,
-          2048,
-          defaultTimeout,
-          3,
-          0,
-          0
-      );
-    }
-
-    static TestPrefetchableTextFilesFirehoseFactory withOpenExceptions(File baseDir, int count)
-    {
-      return new TestPrefetchableTextFilesFirehoseFactory(
-          baseDir,
-          1024,
-          2048,
-          2048,
-          defaultTimeout,
-          3,
-          count,
-          0
-      );
-    }
-
-    static TestPrefetchableTextFilesFirehoseFactory withSleepMillis(File baseDir, long ms)
-    {
-      return new TestPrefetchableTextFilesFirehoseFactory(
-          baseDir,
-          1024,
-          2048,
-          2048,
-          100,
-          3,
-          0,
-          ms
-      );
-    }
-
-    public TestPrefetchableTextFilesFirehoseFactory(
-        File baseDir,
-        long prefetchTriggerThreshold,
-        long maxCacheCapacityBytes,
-        long maxFetchCapacityBytes,
-        long timeout,
-        int maxRetry,
-        int openExceptionCount,
-        long sleepMillis
-    )
-    {
-      super(
-          maxCacheCapacityBytes,
-          maxFetchCapacityBytes,
-          prefetchTriggerThreshold,
-          timeout,
-          maxRetry
-      );
-      this.openExceptionCount = openExceptionCount;
-      this.sleepMillis = sleepMillis;
-      this.baseDir = baseDir;
-    }
-
-    @Override
-    protected Collection<File> initObjects()
-    {
-      return FileUtils.listFiles(
-          Preconditions.checkNotNull(baseDir).getAbsoluteFile(),
-          TrueFileFilter.INSTANCE,
-          TrueFileFilter.INSTANCE
-      );
-    }
-
-    @Override
-    protected InputStream openObjectStream(File object) throws IOException
-    {
-      if (openExceptionCount > 0) {
-        openExceptionCount--;
-        throw new IOException("Exception for retry test");
-      }
-      if (sleepMillis > 0) {
-        try {
-          Thread.sleep(sleepMillis);
-        }
-        catch (InterruptedException e) {
-          throw new RuntimeException(e);
-        }
-      }
-      return FileUtils.openInputStream(object);
-    }
-
-    @Override
-    protected InputStream wrapObjectStream(File object, InputStream stream) throws IOException
-    {
-      return stream;
-    }
-  }
-}
diff --git a/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java b/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java
deleted file mode 100644
index 4cbca482d85..00000000000
--- a/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java
+++ /dev/null
@@ -1,458 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.base.Predicates;
-import com.google.common.collect.ImmutableSet;
-import com.google.inject.Binder;
-import com.google.inject.BindingAnnotation;
-import com.google.inject.Guice;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.Module;
-import com.google.inject.TypeLiteral;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import java.util.HashSet;
-import java.util.Properties;
-import java.util.Set;
-
-/**
- */
-public class ConditionalMultibindTest
-{
-
-  private static final String ANIMAL_TYPE = "animal.type";
-
-  private Properties props;
-
-  @Before
-  public void setUp() throws Exception
-  {
-    props = new Properties();
-  }
-
-  @Test
-  public void testMultiConditionalBind_cat()
-  {
-    props.setProperty("animal.type", "cat");
-
-    Injector injector = Guice.createInjector(new Module()
-    {
-      @Override
-      public void configure(Binder binder)
-      {
-        ConditionalMultibind.create(props, binder, Animal.class)
-                            .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("cat"), Cat.class)
-                            .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("dog"), Dog.class);
-      }
-    });
-
-    Set<Animal> animalSet = injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>()
-    {
-    }));
-
-    Assert.assertEquals(1, animalSet.size());
-    Assert.assertEquals(animalSet, ImmutableSet.<Animal>of(new Cat()));
-  }
-
-  @Test
-  public void testMultiConditionalBind_cat_dog()
-  {
-    props.setProperty("animal.type", "pets");
-
-    Injector injector = Guice.createInjector(new Module()
-    {
-      @Override
-      public void configure(Binder binder)
-      {
-        ConditionalMultibind.create(props, binder, Animal.class)
-                            .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Cat.class)
-                            .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Dog.class);
-      }
-    });
-
-    Set<Animal> animalSet = injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>()
-    {
-    }));
-
-    Assert.assertEquals(2, animalSet.size());
-    Assert.assertEquals(animalSet, ImmutableSet.<Animal>of(new Cat(), new Dog()));
-  }
-
-  @Test
-  public void testMultiConditionalBind_cat_dog_non_continuous_syntax()
-  {
-    props.setProperty("animal.type", "pets");
-
-    Injector injector = Guice.createInjector(new Module()
-    {
-      @Override
-      public void configure(Binder binder)
-      {
-        ConditionalMultibind.create(props, binder, Animal.class)
-                            .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Cat.class);
-
-        ConditionalMultibind.create(props, binder, Animal.class)
-                            .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Dog.class);
-
-      }
-    });
-
-    Set<Animal> animalSet = injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>()
-    {
-    }));
-
-    Assert.assertEquals(2, animalSet.size());
-    Assert.assertEquals(animalSet, ImmutableSet.<Animal>of(new Cat(), new Dog()));
-  }
-
-  @Test
-  public void testMultiConditionalBind_multiple_modules()
-  {
-    props.setProperty("animal.type", "pets");
-
-    Injector injector = Guice.createInjector(
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            ConditionalMultibind.create(props, binder, Animal.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Cat.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Dog.class);
-          }
-        },
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            ConditionalMultibind.create(props, binder, Animal.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("not_match"), Tiger.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Fish.class);
-          }
-        }
-    );
-
-    Set<Animal> animalSet = injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>()
-    {
-    }));
-
-    Assert.assertEquals(3, animalSet.size());
-    Assert.assertEquals(animalSet, ImmutableSet.<Animal>of(new Cat(), new Dog(), new Fish()));
-  }
-
-  @Test
-  public void testMultiConditionalBind_multiple_modules_with_annotation()
-  {
-    props.setProperty("animal.type", "pets");
-
-    Injector injector = Guice.createInjector(
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            ConditionalMultibind.create(props, binder, Animal.class, SanDiego.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Cat.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Dog.class);
-          }
-        },
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            ConditionalMultibind.create(props, binder, Animal.class, SanDiego.class)
-                                .addBinding(new Bird())
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Tiger.class);
-
-            ConditionalMultibind.create(props, binder, Animal.class, SanJose.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Fish.class);
-          }
-        }
-    );
-
-    Set<Animal> animalSet_1 = injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>()
-    {
-    }, SanDiego.class));
-    Assert.assertEquals(4, animalSet_1.size());
-    Assert.assertEquals(animalSet_1, ImmutableSet.<Animal>of(new Bird(), new Cat(), new Dog(), new Tiger()));
-
-    Set<Animal> animalSet_2 = injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>()
-    {
-    }, SanJose.class));
-    Assert.assertEquals(1, animalSet_2.size());
-    Assert.assertEquals(animalSet_2, ImmutableSet.<Animal>of(new Fish()));
-  }
-
-  @Test
-  public void testMultiConditionalBind_inject()
-  {
-    props.setProperty("animal.type", "pets");
-
-    Injector injector = Guice.createInjector(
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            ConditionalMultibind.create(props, binder, Animal.class)
-                                .addBinding(Bird.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Cat.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Dog.class);
-          }
-        },
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            ConditionalMultibind.create(props, binder, Animal.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("not_match"), Tiger.class)
-                                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), Fish.class);
-          }
-        }
-    );
-
-    PetShotAvails shop = new PetShotAvails();
-    injector.injectMembers(shop);
-
-    Assert.assertEquals(4, shop.animals.size());
-    Assert.assertEquals(shop.animals, ImmutableSet.<Animal>of(new Bird(), new Cat(), new Dog(), new Fish()));
-  }
-
-  @Test
-  public void testMultiConditionalBind_typeLiteral()
-  {
-    props.setProperty("animal.type", "pets");
-
-    final Set<Animal> set1 = ImmutableSet.<Animal>of(new Dog(), new Tiger());
-    final Set<Animal> set2 = ImmutableSet.<Animal>of(new Cat(), new Fish());
-    final Set<Animal> set3 = ImmutableSet.<Animal>of(new Cat());
-    final Set<Animal> union = new HashSet<>();
-    union.addAll(set1);
-    union.addAll(set2);
-
-    final Zoo<Animal> zoo1 = new Zoo<>(set1);
-    final Zoo<Animal> zoo2 = new Zoo<>();
-
-    Injector injector = Guice.createInjector(
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            ConditionalMultibind
-                .create(props, binder, new TypeLiteral<Set<Animal>>() {})
-                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), set1)
-                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), set2);
-
-            ConditionalMultibind
-                .create(props, binder, new TypeLiteral<Zoo<Animal>>() {})
-                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), zoo1);
-          }
-        },
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            ConditionalMultibind
-                .create(props, binder, new TypeLiteral<Set<Animal>>() {})
-                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), set3);
-
-            ConditionalMultibind
-                .create(props, binder, new TypeLiteral<Set<Animal>>() {}, SanDiego.class)
-                .addConditionBinding(ANIMAL_TYPE, Predicates.equalTo("pets"), union);
-
-            ConditionalMultibind
-                .create(props, binder, new TypeLiteral<Zoo<Animal>>() {})
-                .addBinding(new TypeLiteral<Zoo<Animal>>() {});
-
-          }
-        }
-    );
-
-    Set<Set<Animal>> actualAnimalSet = injector.getInstance(Key.get(new TypeLiteral<Set<Set<Animal>>>() {}));
-    Assert.assertEquals(3, actualAnimalSet.size());
-    Assert.assertEquals(ImmutableSet.of(set1, set2, set3), actualAnimalSet);
-
-    actualAnimalSet = injector.getInstance(Key.get(new TypeLiteral<Set<Set<Animal>>>() {}, SanDiego.class));
-    Assert.assertEquals(1, actualAnimalSet.size());
-    Assert.assertEquals(ImmutableSet.of(union), actualAnimalSet);
-
-    final Set<Zoo<Animal>> actualZooSet = injector.getInstance(Key.get(new TypeLiteral<Set<Zoo<Animal>>>() {}));
-    Assert.assertEquals(2, actualZooSet.size());
-    Assert.assertEquals(ImmutableSet.of(zoo1, zoo2), actualZooSet);
-  }
-
-  static abstract class Animal
-  {
-    private final String type;
-
-    Animal(String type)
-    {
-      this.type = type;
-    }
-
-    @Override
-    public String toString()
-    {
-      return "Animal{" +
-             "type='" + type + '\'' +
-             '}';
-    }
-
-    @Override
-    public boolean equals(Object o)
-    {
-      if (this == o) {
-        return true;
-      }
-      if (o == null || getClass() != o.getClass()) {
-        return false;
-      }
-
-      Animal animal = (Animal) o;
-
-      return type != null ? type.equals(animal.type) : animal.type == null;
-    }
-
-    @Override
-    public int hashCode()
-    {
-      return type != null ? type.hashCode() : 0;
-    }
-  }
-
-  static class PetShotAvails
-  {
-    @Inject
-    Set<Animal> animals;
-  }
-
-  static class Dog extends Animal
-  {
-    Dog()
-    {
-      super("dog");
-    }
-  }
-
-  static class Cat extends Animal
-  {
-    Cat()
-    {
-      super("cat");
-    }
-  }
-
-  static class Fish extends Animal
-  {
-    Fish()
-    {
-      super("fish");
-    }
-  }
-
-  static class Tiger extends Animal
-  {
-    Tiger()
-    {
-      super("tiger");
-    }
-  }
-
-  static class Bird extends Animal
-  {
-    Bird()
-    {
-      super("bird");
-    }
-  }
-
-  static class Zoo<T>
-  {
-    Set<T> animals;
-
-    public Zoo()
-    {
-      animals = new HashSet<>();
-    }
-
-    public Zoo(Set<T> animals)
-    {
-      this.animals = animals;
-    }
-
-    @Override
-    public boolean equals(Object o)
-    {
-      if (this == o) {
-        return true;
-      }
-      if (o == null || getClass() != o.getClass()) {
-        return false;
-      }
-
-      Zoo<?> zoo = (Zoo<?>) o;
-
-      return animals != null ? animals.equals(zoo.animals) : zoo.animals == null;
-    }
-
-    @Override
-    public int hashCode()
-    {
-      return animals != null ? animals.hashCode() : 0;
-    }
-
-    @Override
-    public String toString()
-    {
-      return "Zoo{" +
-             "animals=" + animals +
-             '}';
-    }
-  }
-
-  @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-  @Retention(RetentionPolicy.RUNTIME)
-  @BindingAnnotation
-  @interface SanDiego
-  {
-  }
-
-  @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
-  @Retention(RetentionPolicy.RUNTIME)
-  @BindingAnnotation
-  @interface SanJose
-  {
-  }
-
-}
diff --git a/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java b/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java
deleted file mode 100644
index 0ce4f77a79a..00000000000
--- a/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import io.druid.TestObjectMapper;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import javax.validation.ConstraintViolation;
-import javax.validation.Validator;
-import javax.validation.executable.ExecutableValidator;
-import javax.validation.metadata.BeanDescriptor;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-
-public class JsonConfiguratorTest
-{
-  private static final String PROP_PREFIX = "test.property.prefix.";
-  private final ObjectMapper mapper = new TestObjectMapper();
-  private final Properties properties = new Properties();
-
-  @Before
-  public void setUp()
-  {
-    mapper.registerSubtypes(MappableObject.class);
-  }
-
-  final Validator validator = new Validator()
-  {
-    @Override
-    public <T> Set<ConstraintViolation<T>> validate(T object, Class<?>... groups)
-    {
-      return ImmutableSet.of();
-    }
-
-    @Override
-    public <T> Set<ConstraintViolation<T>> validateProperty(T object, String propertyName, Class<?>... groups)
-    {
-      return ImmutableSet.of();
-    }
-
-    @Override
-    public <T> Set<ConstraintViolation<T>> validateValue(
-        Class<T> beanType, String propertyName, Object value, Class<?>... groups
-    )
-    {
-      return ImmutableSet.of();
-    }
-
-    @Override
-    public BeanDescriptor getConstraintsForClass(Class<?> clazz)
-    {
-      return null;
-    }
-
-    @Override
-    public <T> T unwrap(Class<T> type)
-    {
-      return null;
-    }
-
-    @Override
-    public ExecutableValidator forExecutables()
-    {
-      return null;
-    }
-  };
-
-  @Test
-  public void testTest()
-  {
-    Assert.assertEquals(
-        new MappableObject("p1", ImmutableList.<String>of("p2")),
-        new MappableObject("p1", ImmutableList.<String>of("p2"))
-    );
-    Assert.assertEquals(new MappableObject("p1", null), new MappableObject("p1", ImmutableList.<String>of()));
-  }
-
-  @Test
-  public void testsimpleConfigurate() throws Exception
-  {
-    final JsonConfigurator configurator = new JsonConfigurator(mapper, validator);
-    properties.setProperty(PROP_PREFIX + "prop1", "prop1");
-    properties.setProperty(PROP_PREFIX + "prop1List", "[\"prop2\"]");
-    final MappableObject obj = configurator.configurate(properties, PROP_PREFIX, MappableObject.class);
-    Assert.assertEquals("prop1", obj.prop1);
-    Assert.assertEquals(ImmutableList.of("prop2"), obj.prop1List);
-  }
-
-  @Test
-  public void testMissingConfigList()
-  {
-    final JsonConfigurator configurator = new JsonConfigurator(mapper, validator);
-    properties.setProperty(PROP_PREFIX + "prop1", "prop1");
-    final MappableObject obj = configurator.configurate(properties, PROP_PREFIX, MappableObject.class);
-    Assert.assertEquals("prop1", obj.prop1);
-    Assert.assertEquals(ImmutableList.of(), obj.prop1List);
-  }
-
-  @Test
-  public void testMissingConfig()
-  {
-    final JsonConfigurator configurator = new JsonConfigurator(mapper, validator);
-    properties.setProperty(PROP_PREFIX + "prop1List", "[\"prop2\"]");
-    final MappableObject obj = configurator.configurate(properties, PROP_PREFIX, MappableObject.class);
-    Assert.assertNull(obj.prop1);
-    Assert.assertEquals(ImmutableList.of("prop2"), obj.prop1List);
-  }
-
-  @Test
-  public void testQuotedConfig()
-  {
-    final JsonConfigurator configurator = new JsonConfigurator(mapper, validator);
-    properties.setProperty(PROP_PREFIX + "prop1", "testing \"prop1\"");
-    final MappableObject obj = configurator.configurate(properties, PROP_PREFIX, MappableObject.class);
-    Assert.assertEquals("testing \"prop1\"", obj.prop1);
-    Assert.assertEquals(ImmutableList.of(), obj.prop1List);
-  }
-}
-
-class MappableObject
-{
-  @JsonProperty("prop1")
-  final String prop1;
-  @JsonProperty("prop1List")
-  final List<String> prop1List;
-
-  @JsonCreator
-  protected MappableObject(
-      @JsonProperty("prop1") final String prop1,
-      @JsonProperty("prop1List") final List<String> prop1List
-  )
-  {
-    this.prop1 = prop1;
-    this.prop1List = prop1List == null ? ImmutableList.<String>of() : prop1List;
-  }
-
-
-  @JsonProperty
-  public List<String> getProp1List()
-  {
-    return prop1List;
-  }
-
-  @JsonProperty
-  public String getProp1()
-  {
-    return prop1;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    MappableObject object = (MappableObject) o;
-
-    if (prop1 != null ? !prop1.equals(object.prop1) : object.prop1 != null) {
-      return false;
-    }
-    return prop1List != null ? prop1List.equals(object.prop1List) : object.prop1List == null;
-
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = prop1 != null ? prop1.hashCode() : 0;
-    result = 31 * result + (prop1List != null ? prop1List.hashCode() : 0);
-    return result;
-  }
-}
diff --git a/api/src/test/java/io/druid/guice/PolyBindTest.java b/api/src/test/java/io/druid/guice/PolyBindTest.java
deleted file mode 100644
index 7b353d7c3b6..00000000000
--- a/api/src/test/java/io/druid/guice/PolyBindTest.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.guice;
-
-import com.google.common.collect.Iterables;
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import com.google.inject.Module;
-import com.google.inject.ProvisionException;
-import com.google.inject.multibindings.MapBinder;
-import com.google.inject.name.Names;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Properties;
-
-/**
- */
-public class PolyBindTest
-{
-  private Properties props;
-  private Injector injector;
-
-  public void setUp(Module... modules) throws Exception
-  {
-    props = new Properties();
-    injector = Guice.createInjector(
-        Iterables.concat(
-            Collections.singletonList(
-                new Module()
-                {
-                  @Override
-                  public void configure(Binder binder)
-                  {
-                    binder.bind(Properties.class).toInstance(props);
-                    PolyBind.createChoice(binder, "billy", Key.get(Gogo.class), Key.get(GoA.class));
-                    PolyBind.createChoiceWithDefault(binder, "sally", Key.get(GogoSally.class), "b");
-
-                  }
-                }
-            ),
-            Arrays.asList(modules)
-        )
-    );
-  }
-
-  @Test
-  public void testSanity() throws Exception
-  {
-    setUp(
-        new Module()
-        {
-          @Override
-          public void configure(Binder binder)
-          {
-            final MapBinder<String, Gogo> gogoBinder = PolyBind.optionBinder(binder, Key.get(Gogo.class));
-            gogoBinder.addBinding("a").to(GoA.class);
-            gogoBinder.addBinding("b").to(GoB.class);
-
-            final MapBinder<String, GogoSally> gogoSallyBinder = PolyBind.optionBinder(binder, Key.get(GogoSally.class));
-            gogoSallyBinder.addBinding("a").to(GoA.class);
-            gogoSallyBinder.addBinding("b").to(GoB.class);
-
-            PolyBind.createChoice(binder, "billy", Key.get(Gogo.class, Names.named("reverse")), Key.get(GoB.class));
-            final MapBinder<String, Gogo> annotatedGogoBinder = PolyBind.optionBinder(
-                binder,
-                Key.get(Gogo.class, Names.named("reverse"))
-            );
-            annotatedGogoBinder.addBinding("a").to(GoB.class);
-            annotatedGogoBinder.addBinding("b").to(GoA.class);
-          }
-        }
-    );
-
-
-    Assert.assertEquals("A", injector.getInstance(Gogo.class).go());
-    Assert.assertEquals("B", injector.getInstance(Key.get(Gogo.class, Names.named("reverse"))).go());
-    props.setProperty("billy", "b");
-    Assert.assertEquals("B", injector.getInstance(Gogo.class).go());
-    Assert.assertEquals("A", injector.getInstance(Key.get(Gogo.class, Names.named("reverse"))).go());
-    props.setProperty("billy", "a");
-    Assert.assertEquals("A", injector.getInstance(Gogo.class).go());
-    Assert.assertEquals("B", injector.getInstance(Key.get(Gogo.class, Names.named("reverse"))).go());
-    props.setProperty("billy", "b");
-    Assert.assertEquals("B", injector.getInstance(Gogo.class).go());
-    Assert.assertEquals("A", injector.getInstance(Key.get(Gogo.class, Names.named("reverse"))).go());
-    props.setProperty("billy", "c");
-    try {
-      Assert.assertEquals("A", injector.getInstance(Gogo.class).go());
-      Assert.fail(); // should never be reached
-    }
-    catch (Exception e) {
-      Assert.assertTrue(e instanceof ProvisionException);
-      Assert.assertTrue(e.getMessage().contains("Unknown provider[c] of Key[type=io.druid.guice.PolyBindTest$Gogo"));
-    }
-    try {
-      Assert.assertEquals("B", injector.getInstance(Key.get(Gogo.class, Names.named("reverse"))).go());
-      Assert.fail(); // should never be reached
-    }
-    catch (Exception e) {
-      Assert.assertTrue(e instanceof ProvisionException);
-      Assert.assertTrue(e.getMessage().contains("Unknown provider[c] of Key[type=io.druid.guice.PolyBindTest$Gogo"));
-    }
-    
-    // test default property value
-    Assert.assertEquals("B", injector.getInstance(GogoSally.class).go());
-    props.setProperty("sally", "a");
-    Assert.assertEquals("A", injector.getInstance(GogoSally.class).go());
-    props.setProperty("sally", "b");
-    Assert.assertEquals("B", injector.getInstance(GogoSally.class).go());
-    props.setProperty("sally", "c");
-    try {
-      injector.getInstance(GogoSally.class).go();
-      Assert.fail(); // should never be reached
-    }
-    catch (Exception e) {
-      Assert.assertTrue(e instanceof ProvisionException);
-      Assert.assertTrue(e.getMessage().contains("Unknown provider[c] of Key[type=io.druid.guice.PolyBindTest$GogoSally"));
-    }
-  }
-
-  public interface Gogo
-  {
-    String go();
-  }
-
-  public interface GogoSally
-  {
-    String go();
-  }
-
-  public static class GoA implements Gogo, GogoSally
-  {
-    @Override
-    public String go()
-    {
-      return "A";
-    }
-  }
-
-  public static class GoB implements Gogo, GogoSally
-  {
-    @Override
-    public String go()
-    {
-      return "B";
-    }
-  }
-}
diff --git a/api/src/test/java/io/druid/jackson/JacksonExtremeDoubleValuesSerdeTest.java b/api/src/test/java/io/druid/jackson/JacksonExtremeDoubleValuesSerdeTest.java
deleted file mode 100644
index 789fd82def9..00000000000
--- a/api/src/test/java/io/druid/jackson/JacksonExtremeDoubleValuesSerdeTest.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.jackson;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-
-public class JacksonExtremeDoubleValuesSerdeTest
-{
-  @Test
-  public void testExtremeDoubleValuesSerde() throws IOException
-  {
-    ObjectMapper objectMapper = new ObjectMapper();
-    for (double value : new double[] {Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY}) {
-      String serialized = objectMapper.writeValueAsString(value);
-      Assert.assertEquals(new Double(value), objectMapper.readValue(serialized, Double.class));
-    }
-    String negativeInfinityString = objectMapper.writeValueAsString(Double.NaN);
-    Assert.assertTrue(objectMapper.readValue(negativeInfinityString, Double.class).isNaN());
-  }
-}
diff --git a/api/src/test/java/io/druid/js/JavaScriptConfigTest.java b/api/src/test/java/io/druid/js/JavaScriptConfigTest.java
deleted file mode 100644
index 6917caebfda..00000000000
--- a/api/src/test/java/io/druid/js/JavaScriptConfigTest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.js;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.junit.Assert;
-import org.junit.Test;
-
-public class JavaScriptConfigTest
-{
-  private static ObjectMapper mapper = new ObjectMapper();
-
-  @Test
-  public void testSerde() throws Exception
-  {
-    String json = "{\"enabled\":true}";
-
-    JavaScriptConfig config = mapper.readValue(
-        mapper.writeValueAsString(
-            mapper.readValue(
-                json,
-                JavaScriptConfig.class
-            )
-        ), JavaScriptConfig.class
-    );
-
-    Assert.assertTrue(config.isEnabled());
-  }
-
-  @Test
-  public void testSerdeWithDefaults() throws Exception
-  {
-    String json = "{}";
-
-    JavaScriptConfig config = mapper.readValue(
-        mapper.writeValueAsString(
-            mapper.readValue(
-                json,
-                JavaScriptConfig.class
-            )
-        ), JavaScriptConfig.class
-    );
-
-    Assert.assertFalse(config.isEnabled());
-  }
-}
diff --git a/api/src/test/java/io/druid/segment/SegmentUtilsTest.java b/api/src/test/java/io/druid/segment/SegmentUtilsTest.java
deleted file mode 100644
index 142b896797b..00000000000
--- a/api/src/test/java/io/druid/segment/SegmentUtilsTest.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.segment;
-
-import com.google.common.primitives.Ints;
-import org.apache.commons.io.FileUtils;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- */
-public class SegmentUtilsTest
-{
-  @Rule
-  public final TemporaryFolder tempFolder = new TemporaryFolder();
-
-  @Test
-  public void testVersionBin() throws Exception
-  {
-    File dir = tempFolder.newFolder();
-    FileUtils.writeByteArrayToFile(new File(dir, "version.bin"), Ints.toByteArray(9));
-    Assert.assertEquals(9, SegmentUtils.getVersionFromDir(dir));
-  }
-
-  @Test
-  public void testIndexDrd() throws Exception
-  {
-    File dir = tempFolder.newFolder();
-    FileUtils.writeByteArrayToFile(new File(dir, "index.drd"), new byte[]{(byte) 0x8});
-    Assert.assertEquals(8, SegmentUtils.getVersionFromDir(dir));
-  }
-
-  @Test(expected = IOException.class)
-  public void testException() throws Exception
-  {
-    SegmentUtils.getVersionFromDir(tempFolder.newFolder());
-  }
-}
diff --git a/api/src/test/java/io/druid/timeline/DataSegmentTest.java b/api/src/test/java/io/druid/timeline/DataSegmentTest.java
deleted file mode 100644
index 4222775521f..00000000000
--- a/api/src/test/java/io/druid/timeline/DataSegmentTest.java
+++ /dev/null
@@ -1,257 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline;
-
-import com.fasterxml.jackson.databind.InjectableValues;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Range;
-import com.google.common.collect.Sets;
-import io.druid.TestObjectMapper;
-import io.druid.data.input.InputRow;
-import io.druid.java.util.common.DateTimes;
-import io.druid.java.util.common.Intervals;
-import io.druid.java.util.common.jackson.JacksonUtils;
-import io.druid.timeline.partition.NoneShardSpec;
-import io.druid.timeline.partition.PartitionChunk;
-import io.druid.timeline.partition.ShardSpec;
-import io.druid.timeline.partition.ShardSpecLookup;
-import org.joda.time.Interval;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- */
-public class DataSegmentTest
-{
-  private final static ObjectMapper mapper = new TestObjectMapper();
-  private final static int TEST_VERSION = 0x7;
-
-  private static ShardSpec getShardSpec(final int partitionNum)
-  {
-    return new ShardSpec()
-    {
-      @Override
-      public <T> PartitionChunk<T> createChunk(T obj)
-      {
-        return null;
-      }
-
-      @Override
-      public boolean isInChunk(long timestamp, InputRow inputRow)
-      {
-        return false;
-      }
-
-      @Override
-      public int getPartitionNum()
-      {
-        return partitionNum;
-      }
-
-      @Override
-      public ShardSpecLookup getLookup(List<ShardSpec> shardSpecs)
-      {
-        return null;
-      }
-
-      @Override
-      public Map<String, Range<String>> getDomain()
-      {
-        return ImmutableMap.of();
-      }
-    };
-  }
-
-  @Before
-  public void setUp()
-  {
-    InjectableValues.Std injectableValues = new InjectableValues.Std();
-    injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
-    mapper.setInjectableValues(injectableValues);
-  }
-
-  @Test
-  public void testV1Serialization() throws Exception
-  {
-
-    final Interval interval = Intervals.of("2011-10-01/2011-10-02");
-    final ImmutableMap<String, Object> loadSpec = ImmutableMap.<String, Object>of("something", "or_other");
-
-    DataSegment segment = new DataSegment(
-        "something",
-        interval,
-        "1",
-        loadSpec,
-        Arrays.asList("dim1", "dim2"),
-        Arrays.asList("met1", "met2"),
-        NoneShardSpec.instance(),
-        TEST_VERSION,
-        1
-    );
-
-    final Map<String, Object> objectMap = mapper.readValue(
-        mapper.writeValueAsString(segment),
-        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
-    );
-
-    Assert.assertEquals(10, objectMap.size());
-    Assert.assertEquals("something", objectMap.get("dataSource"));
-    Assert.assertEquals(interval.toString(), objectMap.get("interval"));
-    Assert.assertEquals("1", objectMap.get("version"));
-    Assert.assertEquals(loadSpec, objectMap.get("loadSpec"));
-    Assert.assertEquals("dim1,dim2", objectMap.get("dimensions"));
-    Assert.assertEquals("met1,met2", objectMap.get("metrics"));
-    Assert.assertEquals(ImmutableMap.of("type", "none"), objectMap.get("shardSpec"));
-    Assert.assertEquals(TEST_VERSION, objectMap.get("binaryVersion"));
-    Assert.assertEquals(1, objectMap.get("size"));
-
-    DataSegment deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
-
-    Assert.assertEquals(segment.getDataSource(), deserializedSegment.getDataSource());
-    Assert.assertEquals(segment.getInterval(), deserializedSegment.getInterval());
-    Assert.assertEquals(segment.getVersion(), deserializedSegment.getVersion());
-    Assert.assertEquals(segment.getLoadSpec(), deserializedSegment.getLoadSpec());
-    Assert.assertEquals(segment.getDimensions(), deserializedSegment.getDimensions());
-    Assert.assertEquals(segment.getMetrics(), deserializedSegment.getMetrics());
-    Assert.assertEquals(segment.getShardSpec(), deserializedSegment.getShardSpec());
-    Assert.assertEquals(segment.getSize(), deserializedSegment.getSize());
-    Assert.assertEquals(segment.getIdentifier(), deserializedSegment.getIdentifier());
-
-    deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
-    Assert.assertEquals(0, segment.compareTo(deserializedSegment));
-
-    deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
-    Assert.assertEquals(0, deserializedSegment.compareTo(segment));
-
-    deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
-    Assert.assertEquals(segment.hashCode(), deserializedSegment.hashCode());
-  }
-
-  @Test
-  public void testIdentifier()
-  {
-    final DataSegment segment = DataSegment.builder()
-                                           .dataSource("foo")
-                                           .interval(Intervals.of("2012-01-01/2012-01-02"))
-                                           .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
-                                           .shardSpec(NoneShardSpec.instance())
-                                           .build();
-
-    Assert.assertEquals(
-        "foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z",
-        segment.getIdentifier()
-    );
-  }
-
-  @Test
-  public void testIdentifierWithZeroPartition()
-  {
-    final DataSegment segment = DataSegment.builder()
-                                           .dataSource("foo")
-                                           .interval(Intervals.of("2012-01-01/2012-01-02"))
-                                           .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
-                                           .shardSpec(getShardSpec(0))
-                                           .build();
-
-    Assert.assertEquals(
-        "foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z",
-        segment.getIdentifier()
-    );
-  }
-
-  @Test
-  public void testIdentifierWithNonzeroPartition()
-  {
-    final DataSegment segment = DataSegment.builder()
-                                           .dataSource("foo")
-                                           .interval(Intervals.of("2012-01-01/2012-01-02"))
-                                           .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
-                                           .shardSpec(getShardSpec(7))
-                                           .build();
-
-    Assert.assertEquals(
-        "foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z_7",
-        segment.getIdentifier()
-    );
-  }
-
-  @Test
-  public void testV1SerializationNullMetrics() throws Exception
-  {
-    final DataSegment segment = DataSegment.builder()
-                                           .dataSource("foo")
-                                           .interval(Intervals.of("2012-01-01/2012-01-02"))
-                                           .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
-                                           .build();
-
-    final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
-    Assert.assertEquals("empty dimensions", ImmutableList.of(), segment2.getDimensions());
-    Assert.assertEquals("empty metrics", ImmutableList.of(), segment2.getMetrics());
-  }
-
-  @Test
-  public void testBucketMonthComparator() throws Exception
-  {
-    DataSegment[] sortedOrder = {
-        makeDataSegment("test1", "2011-01-01/2011-01-02", "a"),
-        makeDataSegment("test1", "2011-01-02/2011-01-03", "a"),
-        makeDataSegment("test1", "2011-01-02/2011-01-03", "b"),
-        makeDataSegment("test2", "2011-01-01/2011-01-02", "a"),
-        makeDataSegment("test2", "2011-01-02/2011-01-03", "a"),
-        makeDataSegment("test1", "2011-02-01/2011-02-02", "a"),
-        makeDataSegment("test1", "2011-02-02/2011-02-03", "a"),
-        makeDataSegment("test1", "2011-02-02/2011-02-03", "b"),
-        makeDataSegment("test2", "2011-02-01/2011-02-02", "a"),
-        makeDataSegment("test2", "2011-02-02/2011-02-03", "a"),
-    };
-
-    List<DataSegment> shuffled = Lists.newArrayList(sortedOrder);
-    Collections.shuffle(shuffled);
-
-    Set<DataSegment> theSet = Sets.newTreeSet(DataSegment.bucketMonthComparator());
-    theSet.addAll(shuffled);
-
-    int index = 0;
-    for (DataSegment dataSegment : theSet) {
-      Assert.assertEquals(sortedOrder[index], dataSegment);
-      ++index;
-    }
-  }
-
-  private DataSegment makeDataSegment(String dataSource, String interval, String version)
-  {
-    return DataSegment.builder()
-                      .dataSource(dataSource)
-                      .interval(Intervals.of(interval))
-                      .version(version)
-                      .size(1)
-                      .build();
-  }
-}
diff --git a/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java b/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java
deleted file mode 100644
index a58fe0b6f6c..00000000000
--- a/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.timeline;
-
-import io.druid.java.util.common.Intervals;
-import io.druid.timeline.DataSegmentUtils.SegmentIdentifierParts;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- */
-public class DataSegmentUtilsTest
-{
-  @Test
-  public void testBasic()
-  {
-    String datasource = "datasource";
-    SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0");
-    Assert.assertEquals("datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString());
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
-
-    desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
-    Assert.assertEquals("datasource_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString());
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
-
-    desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", null);
-    Assert.assertEquals("datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString());
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
-
-    desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
-    Assert.assertEquals("datasource_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString());
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
-  }
-
-  @Test
-  public void testDataSourceWithUnderscore1()
-  {
-    String datasource = "datasource_1";
-    SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0");
-    Assert.assertEquals("datasource_1_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString());
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
-
-    desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
-    Assert.assertEquals("datasource_1_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString());
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
-
-    desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", null);
-    Assert.assertEquals("datasource_1_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString());
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
-
-    desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
-    Assert.assertEquals("datasource_1_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString());
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
-  }
-
-  @Test
-  public void testDataSourceWithUnderscore2()
-  {
-    String dataSource = "datasource_2015-01-01T00:00:00.000Z";
-    SegmentIdentifierParts desc = new SegmentIdentifierParts(dataSource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0");
-    Assert.assertEquals(
-        "datasource_2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0",
-        desc.toString()
-    );
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString()));
-
-    desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
-    Assert.assertEquals(
-        "datasource_2015-01-01T00:00:00.000Z_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0",
-        desc.toString()
-    );
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString()));
-
-    desc = new SegmentIdentifierParts(dataSource, Intervals.of("2015-01-02/2015-01-03"), "ver", null);
-    Assert.assertEquals(
-        "datasource_2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver",
-        desc.toString()
-    );
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString()));
-
-    desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
-    Assert.assertEquals(
-        "datasource_2015-01-01T00:00:00.000Z_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver",
-        desc.toString()
-    );
-    Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString()));
-  }
-
-  @Test
-  public void testInvalidFormat0()
-  {
-    Assert.assertNull(DataSegmentUtils.valueOf("ds", "datasource_2015-01-02T00:00:00.000Z_2014-10-20T00:00:00.000Z_version"));
-  }
-
-  @Test
-  public void testInvalidFormat1()
-  {
-    Assert.assertNull(DataSegmentUtils.valueOf("datasource", "datasource_invalid_interval_version"));
-  }
-
-  @Test
-  public void testInvalidFormat2()
-  {
-    Assert.assertNull(DataSegmentUtils.valueOf("datasource", "datasource_2015-01-02T00:00:00.000Z_version"));
-  }
-}
diff --git a/api/src/test/java/io/druid/timeline/partition/NoneShardSpecTest.java b/api/src/test/java/io/druid/timeline/partition/NoneShardSpecTest.java
deleted file mode 100644
index bc9925a4ddc..00000000000
--- a/api/src/test/java/io/druid/timeline/partition/NoneShardSpecTest.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package io.druid.timeline.partition;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import io.druid.TestObjectMapper;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-
-public class NoneShardSpecTest
-{
-  @Test
-  public void testEqualsAndHashCode()
-  {
-    final ShardSpec one = NoneShardSpec.instance();
-    final ShardSpec two = NoneShardSpec.instance();
-    Assert.assertEquals(one, two);
-    Assert.assertEquals(one.hashCode(), two.hashCode());
-  }
-
-  @Test
-  public void testSerde() throws Exception
-  {
-    final NoneShardSpec one = NoneShardSpec.instance();
-    ObjectMapper mapper = new TestObjectMapper();
-    NoneShardSpec serde1 = mapper.readValue(mapper.writeValueAsString(one), NoneShardSpec.class);
-    NoneShardSpec serde2 = mapper.readValue(mapper.writeValueAsString(one), NoneShardSpec.class);
-
-    // Serde should return same object instead of creating new one every time.
-    Assert.assertTrue(serde1 == serde2);
-    Assert.assertTrue(one == serde1);
-  }
-
-  @Test
-  public void testPartitionFieldIgnored() throws IOException
-  {
-    final String jsonStr = "{\"type\": \"none\",\"partitionNum\": 2}";
-    ObjectMapper mapper = new TestObjectMapper();
-    final ShardSpec noneShardSpec = mapper.readValue(jsonStr, ShardSpec.class);
-    noneShardSpec.equals(NoneShardSpec.instance());
-  }
-}
diff --git a/api/src/test/resources/log4j2.xml b/api/src/test/resources/log4j2.xml
deleted file mode 100644
index 625f9fe1516..00000000000
--- a/api/src/test/resources/log4j2.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ Licensed to Metamarkets Group Inc. (Metamarkets) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  Metamarkets licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<Configuration status="WARN">
-  <Appenders>
-    <Console name="Console" target="SYSTEM_OUT">
-      <PatternLayout pattern="%d{ISO8601} %p [%t] %c - %m%n"/>
-    </Console>
-  </Appenders>
-  <Loggers>
-    <Root level="info">
-      <AppenderRef ref="Console"/>
-    </Root>
-    <Logger level="debug" name="io.druid" additivity="false">
-      <AppenderRef ref="Console"/>
-    </Logger>
-  </Loggers>
-</Configuration>
diff --git a/aws-common/pom.xml b/aws-common/pom.xml
index 608552f98f5..4b941c46c7d 100644
--- a/aws-common/pom.xml
+++ b/aws-common/pom.xml
@@ -1,19 +1,21 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--
-  ~ Druid - a distributed column store.
-  ~ Copyright 2012 - 2015 Metamarkets Group Inc.
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
   ~
-  ~ Licensed under the Apache License, Version 2.0 (the "License");
-  ~ you may not use this file except in compliance with the License.
-  ~ You may obtain a copy of the License at
+  ~   http://www.apache.org/licenses/LICENSE-2.0
   ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
   -->
 
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
@@ -24,21 +26,25 @@
     <description>druid-aws-common</description>
 
     <parent>
-        <groupId>io.druid</groupId>
+        <groupId>org.apache.druid</groupId>
         <artifactId>druid</artifactId>
-        <version>0.12.0-SNAPSHOT</version>
+        <version>0.13.0-incubating-SNAPSHOT</version>
     </parent>
 
     <dependencies>
         <dependency>
-            <groupId>io.druid</groupId>
-            <artifactId>druid-common</artifactId>
+            <groupId>org.apache.druid</groupId>
+            <artifactId>druid-core</artifactId>
             <version>${project.parent.version}</version>
         </dependency>
         <dependency>
             <groupId>com.amazonaws</groupId>
             <artifactId>aws-java-sdk-ec2</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-s3</artifactId>
+        </dependency>
 
         <!-- Tests -->
         <dependency>
diff --git a/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsConfig.java b/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsConfig.java
deleted file mode 100644
index ae213fe0dee..00000000000
--- a/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsConfig.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.common.aws;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import io.druid.metadata.DefaultPasswordProvider;
-import io.druid.metadata.PasswordProvider;
-
-/**
- */
-public class AWSCredentialsConfig
-{
-  @JsonProperty
-  private PasswordProvider accessKey = new DefaultPasswordProvider("");
-
-  @JsonProperty
-  private PasswordProvider secretKey = new DefaultPasswordProvider("");
-
-  @JsonProperty
-  private String fileSessionCredentials = "";
-
-  public PasswordProvider getAccessKey()
-  {
-    return accessKey;
-  }
-
-  public PasswordProvider getSecretKey()
-  {
-    return secretKey;
-  }
-
-  public String getFileSessionCredentials()
-  {
-    return fileSessionCredentials;
-  }
-}
diff --git a/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsUtils.java b/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsUtils.java
deleted file mode 100644
index c38254d6c78..00000000000
--- a/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsUtils.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.common.aws;
-
-import com.amazonaws.auth.AWSCredentialsProviderChain;
-import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
-import com.amazonaws.auth.InstanceProfileCredentialsProvider;
-import com.amazonaws.auth.SystemPropertiesCredentialsProvider;
-import com.amazonaws.auth.profile.ProfileCredentialsProvider;
-
-public class AWSCredentialsUtils
-{
-  public static AWSCredentialsProviderChain defaultAWSCredentialsProviderChain(final AWSCredentialsConfig config)
-  {
-    return new AWSCredentialsProviderChain(
-        new ConfigDrivenAwsCredentialsConfigProvider(config),
-        new LazyFileSessionCredentialsProvider(config),
-        new EnvironmentVariableCredentialsProvider(),
-        new SystemPropertiesCredentialsProvider(),
-        new ProfileCredentialsProvider(),
-        new InstanceProfileCredentialsProvider());
-  }
-}
diff --git a/aws-common/src/main/java/io/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java b/aws-common/src/main/java/io/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java
deleted file mode 100644
index 732c8c2a3bd..00000000000
--- a/aws-common/src/main/java/io/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.common.aws;
-
-import com.amazonaws.AmazonClientException;
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.AWSCredentialsProvider;
-import com.google.common.base.Strings;
-
-public class ConfigDrivenAwsCredentialsConfigProvider implements AWSCredentialsProvider
-{
-  private AWSCredentialsConfig config;
-
-  public ConfigDrivenAwsCredentialsConfigProvider(AWSCredentialsConfig config)
-  {
-    this.config = config;
-  }
-
-  @Override
-  public AWSCredentials getCredentials()
-  {
-    final String key = config.getAccessKey().getPassword();
-    final String secret = config.getSecretKey().getPassword();
-    if (!Strings.isNullOrEmpty(key) && !Strings.isNullOrEmpty(secret)) {
-      return new AWSCredentials()
-      {
-        @Override
-        public String getAWSAccessKeyId()
-        {
-          return key;
-        }
-
-        @Override
-        public String getAWSSecretKey()
-        {
-          return secret;
-        }
-      };
-    }
-    throw new AmazonClientException("Unable to load AWS credentials from druid AWSCredentialsConfig");
-  }
-
-  @Override
-  public void refresh() {}
-}
diff --git a/aws-common/src/main/java/io/druid/common/aws/FileSessionCredentialsProvider.java b/aws-common/src/main/java/io/druid/common/aws/FileSessionCredentialsProvider.java
deleted file mode 100644
index 920210f9867..00000000000
--- a/aws-common/src/main/java/io/druid/common/aws/FileSessionCredentialsProvider.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.common.aws;
-
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.AWSCredentialsProvider;
-import com.amazonaws.auth.AWSSessionCredentials;
-import io.druid.java.util.common.concurrent.Execs;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-public class FileSessionCredentialsProvider implements AWSCredentialsProvider
-{
-  private final String sessionCredentials;
-  private volatile String sessionToken;
-  private volatile String accessKey;
-  private volatile String secretKey;
-
-  private final ScheduledExecutorService scheduler =
-      Execs.scheduledSingleThreaded("FileSessionCredentialsProviderRefresh-%d");
-
-  public FileSessionCredentialsProvider(String sessionCredentials)
-  {
-    this.sessionCredentials = sessionCredentials;
-    refresh();
-
-    scheduler.scheduleAtFixedRate(this::refresh, 1, 1, TimeUnit.HOURS); // refresh every hour
-  }
-
-  @Override
-  public AWSCredentials getCredentials()
-  {
-    return new AWSSessionCredentials()
-    {
-      @Override
-      public String getSessionToken()
-      {
-        return sessionToken;
-      }
-
-      @Override
-      public String getAWSAccessKeyId()
-      {
-        return accessKey;
-      }
-
-      @Override
-      public String getAWSSecretKey()
-      {
-        return secretKey;
-      }
-    };
-  }
-
-  @Override
-  public void refresh()
-  {
-    try {
-      Properties props = new Properties();
-      InputStream is = new FileInputStream(new File(sessionCredentials));
-      props.load(is);
-      is.close();
-
-      sessionToken = props.getProperty("sessionToken");
-      accessKey = props.getProperty("accessKey");
-      secretKey = props.getProperty("secretKey");
-    }
-    catch (IOException e) {
-      throw new RuntimeException("cannot refresh AWS credentials", e);
-    }
-  }
-}
diff --git a/aws-common/src/main/java/io/druid/common/aws/LazyFileSessionCredentialsProvider.java b/aws-common/src/main/java/io/druid/common/aws/LazyFileSessionCredentialsProvider.java
deleted file mode 100644
index 029ed348577..00000000000
--- a/aws-common/src/main/java/io/druid/common/aws/LazyFileSessionCredentialsProvider.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.common.aws;
-
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.AWSCredentialsProvider;
-
-public class LazyFileSessionCredentialsProvider implements AWSCredentialsProvider
-{
-  private AWSCredentialsConfig config;
-  private FileSessionCredentialsProvider provider;
-
-  public LazyFileSessionCredentialsProvider(AWSCredentialsConfig config)
-  {
-    this.config = config;
-  }
-
-  private FileSessionCredentialsProvider getUnderlyingProvider()
-  {
-    if (provider == null) {
-      synchronized (config) {
-        if (provider == null) {
-          provider = new FileSessionCredentialsProvider(config.getFileSessionCredentials());
-        }
-      }
-    }
-    return provider;
-  }
-
-  @Override
-  public AWSCredentials getCredentials()
-  {
-    return getUnderlyingProvider().getCredentials();
-  }
-
-  @Override
-  public void refresh()
-  {
-    getUnderlyingProvider().refresh();
-  }
-}
diff --git a/aws-common/src/main/java/org/apache/druid/common/aws/AWSClientConfig.java b/aws-common/src/main/java/org/apache/druid/common/aws/AWSClientConfig.java
new file mode 100644
index 00000000000..7c8eb8aa130
--- /dev/null
+++ b/aws-common/src/main/java/org/apache/druid/common/aws/AWSClientConfig.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.druid.common.aws;
+
+import com.amazonaws.services.s3.S3ClientOptions;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class AWSClientConfig
+{
+  @JsonProperty
+  private boolean disableChunkedEncoding = S3ClientOptions.DEFAULT_CHUNKED_ENCODING_DISABLED;
+
+  @JsonProperty
+  private boolean enablePathStyleAccess = S3ClientOptions.DEFAULT_PATH_STYLE_ACCESS;
+
+  @JsonProperty
+  protected boolean forceGlobalBucketAccessEnabled = S3ClientOptions.DEFAULT_FORCE_GLOBAL_BUCKET_ACCESS_ENABLED;
+
+  public boolean isDisableChunkedEncoding()
+  {
+    return disableChunkedEncoding;
+  }
+
+  public boolean isEnablePathStyleAccess()
+  {
+    return enablePathStyleAccess;
+  }
+
+  public boolean isForceGlobalBucketAccessEnabled()
+  {
+    return forceGlobalBucketAccessEnabled;
+  }
+}
diff --git a/aws-common/src/main/java/org/apache/druid/common/aws/AWSCredentialsConfig.java b/aws-common/src/main/java/org/apache/druid/common/aws/AWSCredentialsConfig.java
new file mode 100644
index 00000000000..9eb4d018d61
--- /dev/null
+++ b/aws-common/src/main/java/org/apache/druid/common/aws/AWSCredentialsConfig.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.common.aws;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.druid.metadata.DefaultPasswordProvider;
+import org.apache.druid.metadata.PasswordProvider;
+
+/**
+ */
+public class AWSCredentialsConfig
+{
+  @JsonProperty
+  private PasswordProvider accessKey = new DefaultPasswordProvider("");
+
+  @JsonProperty
+  private PasswordProvider secretKey = new DefaultPasswordProvider("");
+
+  @JsonProperty
+  private String fileSessionCredentials = "";
+
+  public PasswordProvider getAccessKey()
+  {
+    return accessKey;
+  }
+
+  public PasswordProvider getSecretKey()
+  {
+    return secretKey;
+  }
+
+  public String getFileSessionCredentials()
+  {
+    return fileSessionCredentials;
+  }
+}
diff --git a/aws-common/src/main/java/org/apache/druid/common/aws/AWSCredentialsUtils.java b/aws-common/src/main/java/org/apache/druid/common/aws/AWSCredentialsUtils.java
new file mode 100644
index 00000000000..a0ccce4ad24
--- /dev/null
+++ b/aws-common/src/main/java/org/apache/druid/common/aws/AWSCredentialsUtils.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.common.aws;
+
+import com.amazonaws.auth.AWSCredentialsProviderChain;
+import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
+import com.amazonaws.auth.InstanceProfileCredentialsProvider;
+import com.amazonaws.auth.SystemPropertiesCredentialsProvider;
+import com.amazonaws.auth.profile.ProfileCredentialsProvider;
+
+public class AWSCredentialsUtils
+{
+  public static AWSCredentialsProviderChain defaultAWSCredentialsProviderChain(final AWSCredentialsConfig config)
+  {
+    return new AWSCredentialsProviderChain(
+        new ConfigDrivenAwsCredentialsConfigProvider(config),
+        new LazyFileSessionCredentialsProvider(config),
+        new EnvironmentVariableCredentialsProvider(),
+        new SystemPropertiesCredentialsProvider(),
+        new ProfileCredentialsProvider(),
+        new InstanceProfileCredentialsProvider());
+  }
+}
diff --git a/aws-common/src/main/java/org/apache/druid/common/aws/AWSEndpointConfig.java b/aws-common/src/main/java/org/apache/druid/common/aws/AWSEndpointConfig.java
new file mode 100644
index 00000000000..80216d9711f
--- /dev/null
+++ b/aws-common/src/main/java/org/apache/druid/common/aws/AWSEndpointConfig.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.common.aws;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import javax.annotation.Nullable;
+
+public class AWSEndpointConfig
+{
+  @Nullable
+  @JsonProperty
+  private String url;
+
+  @Nullable
+  @JsonProperty
+  private String signingRegion;
+
+  @Nullable
+  public String getUrl()
+  {
+    return url;
+  }
+
+  @Nullable
+  public String getSigningRegion()
+  {
+    return signingRegion;
+  }
+}
diff --git a/aws-common/src/main/java/org/apache/druid/common/aws/AWSProxyConfig.java b/aws-common/src/main/java/org/apache/druid/common/aws/AWSProxyConfig.java
new file mode 100644
index 00000000000..085e810834e
--- /dev/null
+++ b/aws-common/src/main/java/org/apache/druid/common/aws/AWSProxyConfig.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.common.aws;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class AWSProxyConfig
+{
+  @JsonProperty
+  private String host;
+
+  @JsonProperty
+  private int port = -1; // AWS's default proxy port is -1
+
+  @JsonProperty
+  private String username;
+
+  @JsonProperty
+  private String password;
+
+  public String getHost()
+  {
+    return host;
+  }
+
+  public int getPort()
+  {
+    return port;
+  }
+
+  public String getUsername()
+  {
+    return username;
+  }
+
+  public String getPassword()
+  {
+    return password;
+  }
+}
diff --git a/aws-common/src/main/java/org/apache/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java b/aws-common/src/main/java/org/apache/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java
new file mode 100644
index 00000000000..37681593524
--- /dev/null
+++ b/aws-common/src/main/java/org/apache/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.common.aws;
+
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSCredentialsProvider;
+import com.google.common.base.Strings;
+
+public class ConfigDrivenAwsCredentialsConfigProvider implements AWSCredentialsProvider
+{
+  private AWSCredentialsConfig config;
+
+  public ConfigDrivenAwsCredentialsConfigProvider(AWSCredentialsConfig config)
+  {
+    this.config = config;
+  }
+
+  @Override
+  public AWSCredentials getCredentials()
+  {
+    final String key = config.getAccessKey().getPassword();
+    final String secret = config.getSecretKey().getPassword();
+    if (!Strings.isNullOrEmpty(key) && !Strings.isNullOrEmpty(secret)) {
+      return new AWSCredentials()
+      {
+        @Override
+        public String getAWSAccessKeyId()
+        {
+          return key;
+        }
+
+        @Override
+        public String getAWSSecretKey()
+        {
+          return secret;
+        }
+      };
+    }
+    throw new AmazonClientException("Unable to load AWS credentials from druid AWSCredentialsConfig");
+  }
+
+  @Override
+  public void refresh() {}
+}
diff --git a/aws-common/src/main/java/org/apache/druid/common/aws/FileSessionCredentialsProvider.java b/aws-common/src/main/java/org/apache/druid/common/aws/FileSessionCredentialsProvider.java
new file mode 100644
index 00000000000..3dbe64b678f
--- /dev/null
+++ b/aws-common/src/main/java/org/apache/druid/common/aws/FileSessionCredentialsProvider.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.common.aws;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSCredentialsProvider;
+import com.amazonaws.auth.AWSSessionCredentials;
+import org.apache.druid.java.util.common.concurrent.Execs;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+public class FileSessionCredentialsProvider implements AWSCredentialsProvider
+{
+  private final String sessionCredentials;
+  private volatile String sessionToken;
+  private volatile String accessKey;
+  private volatile String secretKey;
+
+  private final ScheduledExecutorService scheduler =
+      Execs.scheduledSingleThreaded("FileSessionCredentialsProviderRefresh-%d");
+
+  public FileSessionCredentialsProvider(String sessionCredentials)
+  {
+    this.sessionCredentials = sessionCredentials;
+    refresh();
+
+    scheduler.scheduleAtFixedRate(this::refresh, 1, 1, TimeUnit.HOURS); // refresh every hour
+  }
+
+  @Override
+  public AWSCredentials getCredentials()
+  {
+    return new AWSSessionCredentials()
+    {
+      @Override
+      public String getSessionToken()
+      {
+        return sessionToken;
+      }
+
+      @Override
+      public String getAWSAccessKeyId()
+      {
+        return accessKey;
+      }
+
+      @Override
+      public String getAWSSecretKey()
+      {
+        return secretKey;
+      }
+    };
+  }
+
+  @Override
+  public void refresh()
+  {
+    try {
+      Properties props = new Properties();
+      InputStream is = new FileInputStream(new File(sessionCredentials));
+      props.load(is);
+      is.close();
+
+      sessionToken = props.getProperty("sessionToken");
+      accessKey = props.getProperty("accessKey");
+      secretKey = props.getProperty("secretKey");
+    }
+    catch (IOException e) {
+      throw new RuntimeException("cannot refresh AWS credentials", e);
+    }
+  }
+}
diff --git a/aws-common/src/main/java/org/apache/druid/common/aws/LazyFileSessionCredentialsProvider.java b/aws-common/src/main/java/org/apache/druid/common/aws/LazyFileSessionCredentialsProvider.java
new file mode 100644
index 00000000000..7fc046b3165
--- /dev/null
+++ b/aws-common/src/main/java/org/apache/druid/common/aws/LazyFileSessionCredentialsProvider.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.common.aws;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSCredentialsProvider;
+
+public class LazyFileSessionCredentialsProvider implements AWSCredentialsProvider
+{
+  private AWSCredentialsConfig config;
+  private FileSessionCredentialsProvider provider;
+
+  public LazyFileSessionCredentialsProvider(AWSCredentialsConfig config)
+  {
+    this.config = config;
+  }
+
+  private FileSessionCredentialsProvider getUnderlyingProvider()
+  {
+    if (provider == null) {
+      synchronized (config) {
+        if (provider == null) {
+          provider = new FileSessionCredentialsProvider(config.getFileSessionCredentials());
+        }
+      }
+    }
+    return provider;
+  }
+
+  @Override
+  public AWSCredentials getCredentials()
+  {
+    return getUnderlyingProvider().getCredentials();
+  }
+
+  @Override
+  public void refresh()
+  {
+    getUnderlyingProvider().refresh();
+  }
+}
diff --git a/aws-common/src/test/java/io/druid/common/aws/AWSCredentialsConfigTest.java b/aws-common/src/test/java/io/druid/common/aws/AWSCredentialsConfigTest.java
deleted file mode 100644
index daa66c557d0..00000000000
--- a/aws-common/src/test/java/io/druid/common/aws/AWSCredentialsConfigTest.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.common.aws;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Scopes;
-import com.google.inject.name.Names;
-import io.druid.guice.JsonConfigProvider;
-import io.druid.guice.JsonConfigurator;
-import io.druid.guice.LazySingleton;
-import io.druid.metadata.DefaultPasswordProvider;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import javax.validation.Validation;
-import javax.validation.Validator;
-import java.util.Properties;
-import java.util.UUID;
-
-public class AWSCredentialsConfigTest
-{
-  private static final String PROPERTY_PREFIX = UUID.randomUUID().toString();
-  private static final String SOME_SECRET = "someSecret";
-  private final Properties properties = new Properties();
-
-  @Before
-  public void setUp()
-  {
-    cleanProperties();
-  }
-
-  @After
-  public void tearDown()
-  {
-    cleanProperties();
-  }
-
-  private void cleanProperties()
-  {
-    properties.clear();
-  }
-
-  @Test
-  public void testStringProperty()
-  {
-    properties.put(PROPERTY_PREFIX + ".accessKey", SOME_SECRET);
-    properties.put(PROPERTY_PREFIX + ".secretKey", SOME_SECRET);
-
-    final Injector injector = Guice.createInjector(
-        binder -> {
-          binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis");
-          binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
-          binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
-          binder.bind(Validator.class).toInstance(Validation.buildDefaultValidatorFactory().getValidator());
-          binder.bindScope(LazySingleton.class, Scopes.SINGLETON);
-          binder.bind(JsonConfigurator.class).in(LazySingleton.class);
-          binder.bind(Properties.class).toInstance(properties);
-          JsonConfigProvider.bind(binder, PROPERTY_PREFIX, AWSCredentialsConfig.class);
-        }
-    );
-    final AWSCredentialsConfig credentialsConfig = injector.getInstance(AWSCredentialsConfig.class);
-    Assert.assertEquals(SOME_SECRET, credentialsConfig.getAccessKey().getPassword());
-    Assert.assertEquals(SOME_SECRET, credentialsConfig.getSecretKey().getPassword());
-  }
-
-  @Test
-  public void testJsonProperty() throws Exception
-  {
-    final String someSecret = new ObjectMapper().writeValueAsString(new DefaultPasswordProvider(SOME_SECRET));
-    properties.put(PROPERTY_PREFIX + ".accessKey", someSecret);
-    properties.put(PROPERTY_PREFIX + ".secretKey", someSecret);
-
-    final Injector injector = Guice.createInjector(
-        binder -> {
-          binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis");
-          binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
-          binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
-          binder.bind(Validator.class).toInstance(Validation.buildDefaultValidatorFactory().getValidator());
-          binder.bindScope(LazySingleton.class, Scopes.SINGLETON);
-          binder.bind(JsonConfigurator.class).in(LazySingleton.class);
-          binder.bind(Properties.class).toInstance(properties);
-          JsonConfigProvider.bind(binder, PROPERTY_PREFIX, AWSCredentialsConfig.class);
-        }
-    );
-    final AWSCredentialsConfig credentialsConfig = injector.getInstance(AWSCredentialsConfig.class);
-    Assert.assertEquals(SOME_SECRET, credentialsConfig.getAccessKey().getPassword());
-    Assert.assertEquals(SOME_SECRET, credentialsConfig.getSecretKey().getPassword());
-  }
-}
diff --git a/aws-common/src/test/java/org/apache/druid/common/aws/AWSCredentialsConfigTest.java b/aws-common/src/test/java/org/apache/druid/common/aws/AWSCredentialsConfigTest.java
new file mode 100644
index 00000000000..522ef1c0cd7
--- /dev/null
+++ b/aws-common/src/test/java/org/apache/druid/common/aws/AWSCredentialsConfigTest.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.common.aws;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Scopes;
+import com.google.inject.name.Names;
+import org.apache.druid.guice.JsonConfigProvider;
+import org.apache.druid.guice.JsonConfigurator;
+import org.apache.druid.guice.LazySingleton;
+import org.apache.druid.metadata.DefaultPasswordProvider;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import javax.validation.Validation;
+import javax.validation.Validator;
+import java.util.Properties;
+import java.util.UUID;
+
+public class AWSCredentialsConfigTest
+{
+  private static final String PROPERTY_PREFIX = UUID.randomUUID().toString();
+  private static final String SOME_SECRET = "someSecret";
+  private final Properties properties = new Properties();
+
+  @Before
+  public void setUp()
+  {
+    cleanProperties();
+  }
+
+  @After
+  public void tearDown()
+  {
+    cleanProperties();
+  }
+
+  private void cleanProperties()
+  {
+    properties.clear();
+  }
+
+  @Test
+  public void testStringProperty()
+  {
+    properties.put(PROPERTY_PREFIX + ".accessKey", SOME_SECRET);
+    properties.put(PROPERTY_PREFIX + ".secretKey", SOME_SECRET);
+
+    final Injector injector = Guice.createInjector(
+        binder -> {
+          binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis");
+          binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
+          binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
+          binder.bind(Validator.class).toInstance(Validation.buildDefaultValidatorFactory().getValidator());
+          binder.bindScope(LazySingleton.class, Scopes.SINGLETON);
+          binder.bind(JsonConfigurator.class).in(LazySingleton.class);
+          binder.bind(Properties.class).toInstance(properties);
+          JsonConfigProvider.bind(binder, PROPERTY_PREFIX, AWSCredentialsConfig.class);
+        }
+    );
+    final AWSCredentialsConfig credentialsConfig = injector.getInstance(AWSCredentialsConfig.class);
+    Assert.assertEquals(SOME_SECRET, credentialsConfig.getAccessKey().getPassword());
+    Assert.assertEquals(SOME_SECRET, credentialsConfig.getSecretKey().getPassword());
+  }
+
+  @Test
+  public void testJsonProperty() throws Exception
+  {
+    final String someSecret = new ObjectMapper().writeValueAsString(new DefaultPasswordProvider(SOME_SECRET));
+    properties.put(PROPERTY_PREFIX + ".accessKey", someSecret);
+    properties.put(PROPERTY_PREFIX + ".secretKey", someSecret);
+
+    final Injector injector = Guice.createInjector(
+        binder -> {
+          binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis");
+          binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
+          binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
+          binder.bind(Validator.class).toInstance(Validation.buildDefaultValidatorFactory().getValidator());
+          binder.bindScope(LazySingleton.class, Scopes.SINGLETON);
+          binder.bind(JsonConfigurator.class).in(LazySingleton.class);
+          binder.bind(Properties.class).toInstance(properties);
+          JsonConfigProvider.bind(binder, PROPERTY_PREFIX, AWSCredentialsConfig.class);
+        }
+    );
+    final AWSCredentialsConfig credentialsConfig = injector.getInstance(AWSCredentialsConfig.class);
+    Assert.assertEquals(SOME_SECRET, credentialsConfig.getAccessKey().getPassword());
+    Assert.assertEquals(SOME_SECRET, credentialsConfig.getSecretKey().getPassword());
+  }
+}
diff --git a/benchmarks/pom.xml b/benchmarks/pom.xml
index 596931c1439..9e2bd8d22e5 100644
--- a/benchmarks/pom.xml
+++ b/benchmarks/pom.xml
@@ -1,22 +1,22 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--
- ~ Licensed to Metamarkets Group Inc. (Metamarkets) under one
- ~ or more contributor license agreements.  See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership.  Metamarkets licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License.  You may obtain a copy of the License at
- ~
- ~   http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied.  See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
 
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
@@ -25,9 +25,9 @@
   <name>druid-benchmarks</name>
   <packaging>jar</packaging>
   <parent>
-    <groupId>io.druid</groupId>
+    <groupId>org.apache.druid</groupId>
     <artifactId>druid</artifactId>
-    <version>0.12.0-SNAPSHOT</version>
+    <version>0.13.0-incubating-SNAPSHOT</version>
   </parent>
 
   <prerequisites>
@@ -47,28 +47,28 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>io.druid</groupId>
+      <groupId>org.apache.druid</groupId>
       <artifactId>druid-processing</artifactId>
       <version>${project.parent.version}</version>
     </dependency>
     <dependency>
-      <groupId>io.druid</groupId>
+      <groupId>org.apache.druid</groupId>
       <artifactId>druid-server</artifactId>
       <version>${project.parent.version}</version>
     </dependency>
     <dependency>
-      <groupId>io.druid</groupId>
+      <groupId>org.apache.druid</groupId>
       <artifactId>druid-sql</artifactId>
       <version>${project.parent.version}</version>
     </dependency>
     <dependency>
-      <groupId>io.druid</groupId>
+      <groupId>org.apache.druid</groupId>
       <artifactId>druid-processing</artifactId>
       <version>${project.parent.version}</version>
       <type>test-jar</type>
     </dependency>
     <dependency>
-      <groupId>io.druid</groupId>
+      <groupId>org.apache.druid</groupId>
       <artifactId>druid-sql</artifactId>
       <version>${project.parent.version}</version>
       <type>test-jar</type>
diff --git a/benchmarks/src/main/java/io/druid/benchmark/BasicAuthUserMapSerdeBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/BasicAuthUserMapSerdeBenchmark.java
deleted file mode 100644
index fcf7d0584ac..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/BasicAuthUserMapSerdeBenchmark.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.smile.SmileFactory;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 10)
-@Measurement(iterations = 25)
-public class BasicAuthUserMapSerdeBenchmark
-{
-  @Param({"1000"})
-  private int numUsers;
-
-  private ObjectMapper smileMapper;
-  private Map<String, BenchmarkUser> userMap;
-  private List<byte[]> serializedUsers;
-
-  @Setup
-  public void setup() throws IOException
-  {
-    smileMapper = new ObjectMapper(new SmileFactory());
-    userMap = new HashMap<>();
-    for (int i = 0; i < numUsers; i++) {
-      BenchmarkUser user = makeUser();
-      userMap.put(user.getName(), user);
-    }
-
-    serializedUsers = new ArrayList<>();
-    for (BenchmarkUser user : userMap.values()) {
-      byte[] serializedUser = smileMapper.writeValueAsBytes(user);
-      serializedUsers.add(serializedUser);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void serialize(Blackhole blackhole) throws Exception
-  {
-    for (BenchmarkUser user : userMap.values()) {
-      byte[] serializedUser = smileMapper.writeValueAsBytes(user);
-      blackhole.consume(serializedUser);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void deserialize(Blackhole blackhole) throws Exception
-  {
-    for (byte[] serializedUser : serializedUsers) {
-      BenchmarkUser user = smileMapper.readValue(serializedUser, BenchmarkUser.class);
-      blackhole.consume(user);
-    }
-  }
-
-  private BenchmarkUser makeUser()
-  {
-    byte[] salt = new byte[32];
-    byte[] hash = new byte[64];
-
-    Random random = new Random();
-    random.nextBytes(salt);
-    random.nextBytes(hash);
-    return new BenchmarkUser(
-        UUID.randomUUID().toString(),
-        new BenchmarkCredentials(
-            salt,
-            hash,
-            10000
-        )
-    );
-  }
-
-  private static class BenchmarkUser
-  {
-    private final String name;
-    private final BenchmarkCredentials credentials;
-
-    @JsonCreator
-    public BenchmarkUser(
-        @JsonProperty("name") String name,
-        @JsonProperty("credentials") BenchmarkCredentials credentials
-    )
-    {
-      this.name = name;
-      this.credentials = credentials;
-    }
-
-    @JsonProperty
-    public String getName()
-    {
-      return name;
-    }
-
-    @JsonProperty
-    public BenchmarkCredentials getCredentials()
-    {
-      return credentials;
-    }
-  }
-
-  private static class BenchmarkCredentials
-  {
-    private final byte[] salt;
-    private final byte[] hash;
-    private final int iterations;
-
-    @JsonCreator
-    public BenchmarkCredentials(
-        @JsonProperty("salt") byte[] salt,
-        @JsonProperty("hash") byte[] hash,
-        @JsonProperty("iterations") int iterations
-    )
-    {
-      this.salt = salt;
-      this.hash = hash;
-      this.iterations = iterations;
-    }
-
-    @JsonProperty
-    public byte[] getSalt()
-    {
-      return salt;
-    }
-
-    @JsonProperty
-    public byte[] getHash()
-    {
-      return hash;
-    }
-
-    @JsonProperty
-    public int getIterations()
-    {
-      return iterations;
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/BitmapIterationBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/BitmapIterationBenchmark.java
deleted file mode 100644
index 2551df1071b..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/BitmapIterationBenchmark.java
+++ /dev/null
@@ -1,283 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import io.druid.collections.bitmap.BitSetBitmapFactory;
-import io.druid.collections.bitmap.BitmapFactory;
-import io.druid.collections.bitmap.ConciseBitmapFactory;
-import io.druid.collections.bitmap.ImmutableBitmap;
-import io.druid.collections.bitmap.MutableBitmap;
-import io.druid.collections.bitmap.RoaringBitmapFactory;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-import org.roaringbitmap.IntIterator;
-
-import java.util.Arrays;
-import java.util.Random;
-import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.TimeUnit;
-
-
-/**
- * Benchmarks of bitmap iteration and iteration + something (cumulative cost), the latter is useful for comparing total
- * "usage cost" of different {@link io.druid.segment.data.BitmapSerdeFactory}.
- *
- * @see #iter(IterState)
- * @see #constructAndIter(ConstructAndIterState)
- * @see #intersectionAndIter(BitmapsForIntersection)
- * @see #unionAndIter(BitmapsForUnion)
- */
-@State(Scope.Benchmark)
-@Fork(1)
-@BenchmarkMode(Mode.AverageTime)
-@OutputTimeUnit(TimeUnit.NANOSECONDS)
-@Warmup(iterations = 5)
-@Measurement(iterations = 5)
-public class BitmapIterationBenchmark
-{
-  @Param({"bitset", "concise", "roaring"})
-  public String bitmapAlgo;
-
-  /**
-   * Fraction of set bits in the bitmaps to iterate. For {@link #intersectionAndIter} and
-   * {@link #unionAndIter}, this is the fraction of set bits in the final result of intersection or union.
-   */
-  @Param({"0.0", "0.001", "0.1", "0.5", "0.99", "1.0"})
-  public double prob;
-
-  /**
-   * The size of all bitmaps, i. e. the number of rows in a segment for the most bitmap use cases.
-   */
-  @Param({"1000000"})
-  public int size;
-
-  private BitmapFactory makeFactory()
-  {
-    switch (bitmapAlgo) {
-      case "bitset":
-        return new BitSetBitmapFactory();
-      case "concise":
-        return new ConciseBitmapFactory();
-      case "roaring":
-        return new RoaringBitmapFactory();
-      default:
-        throw new IllegalStateException();
-    }
-  }
-
-  private BitmapFactory factory;
-
-  @Setup
-  public void setup()
-  {
-    factory = makeFactory();
-  }
-
-  private ImmutableBitmap makeBitmap(double prob)
-  {
-    MutableBitmap mutableBitmap = factory.makeEmptyMutableBitmap();
-    Random random = ThreadLocalRandom.current();
-    for (int bit = 0; bit < size; bit++) {
-      if (random.nextDouble() < prob) {
-        mutableBitmap.add(bit);
-      }
-    }
-    return factory.makeImmutableBitmap(mutableBitmap);
-  }
-
-  @State(Scope.Benchmark)
-  public static class IterState
-  {
-    private ImmutableBitmap bitmap;
-
-    @Setup
-    public void setup(BitmapIterationBenchmark state)
-    {
-      bitmap = state.makeBitmap(state.prob);
-    }
-  }
-
-  /**
-   * General benchmark of bitmap iteration, this is a part of {@link io.druid.segment.IndexMerger#merge} and
-   * query processing on both realtime and historical nodes.
-   */
-  @Benchmark
-  public int iter(IterState state)
-  {
-    ImmutableBitmap bitmap = state.bitmap;
-    return iter(bitmap);
-  }
-
-  private static int iter(ImmutableBitmap bitmap)
-  {
-    int consume = 0;
-    for (IntIterator it = bitmap.iterator(); it.hasNext();) {
-      consume ^= it.next();
-    }
-    return consume;
-  }
-
-  @State(Scope.Benchmark)
-  public static class ConstructAndIterState
-  {
-    private int dataSize;
-    private int[] data;
-
-    @Setup
-    public void setup(BitmapIterationBenchmark state)
-    {
-      data = new int[(int) (state.size * state.prob) * 2];
-      dataSize = 0;
-      Random random = ThreadLocalRandom.current();
-      for (int bit = 0; bit < state.size; bit++) {
-        if (random.nextDouble() < state.prob) {
-          data[dataSize] = bit;
-          dataSize++;
-        }
-      }
-    }
-  }
-
-  /**
-   * Benchmark of cumulative cost of construction of an immutable bitmap and then iterating over it. This is a pattern
-   * from realtime nodes, see {@link io.druid.segment.StringDimensionIndexer#fillBitmapsFromUnsortedEncodedKeyComponent}.
-   * However this benchmark is yet approximate and to be improved to better reflect actual workloads of realtime nodes.
-   */
-  @Benchmark
-  public int constructAndIter(ConstructAndIterState state)
-  {
-    int dataSize = state.dataSize;
-    int[] data = state.data;
-    MutableBitmap mutableBitmap = factory.makeEmptyMutableBitmap();
-    for (int i = 0; i < dataSize; i++) {
-      mutableBitmap.add(data[i]);
-    }
-    ImmutableBitmap bitmap = factory.makeImmutableBitmap(mutableBitmap);
-    return iter(bitmap);
-  }
-
-  @State(Scope.Benchmark)
-  public static class BitmapsForIntersection
-  {
-    /**
-     * Number of bitmaps to intersect.
-     */
-    @Param({"2", "10", "100"})
-    public int n;
-
-    private ImmutableBitmap[] bitmaps;
-
-    @Setup
-    public void setup(BitmapIterationBenchmark state)
-    {
-      // prob of intersection = product (probs of intersected bitmaps), prob = intersectedBitmapProb ^ n
-      double intersectedBitmapProb = Math.pow(state.prob, 1.0 / n);
-      bitmaps = new ImmutableBitmap[n];
-      for (int i = 0; i < n; i++) {
-        bitmaps[i] = state.makeBitmap(intersectedBitmapProb);
-      }
-    }
-  }
-
-  /**
-   * Benchmark of cumulative cost of bitmap intersection with subsequent iteration over the result. This is a pattern
-   * from query processing of historical nodes, when {@link io.druid.segment.filter.AndFilter} is used.
-   */
-  @Benchmark
-  public int intersectionAndIter(BitmapsForIntersection state)
-  {
-    ImmutableBitmap intersection = factory.intersection(Arrays.asList(state.bitmaps));
-    return iter(intersection);
-  }
-
-  @State(Scope.Benchmark)
-  public static class BitmapsForUnion
-  {
-    /**
-     * Number of bitmaps to union.
-     */
-    @Param({"2", "10", "100"})
-    public int n;
-
-    private ImmutableBitmap[] bitmaps;
-
-    @Setup
-    public void setup(BitmapIterationBenchmark state)
-    {
-      double prob = Math.pow(state.prob, 1.0 / n);
-      MutableBitmap[] mutableBitmaps = new MutableBitmap[n];
-      for (int i = 0; i < n; i++) {
-        mutableBitmaps[i] = state.factory.makeEmptyMutableBitmap();
-      }
-      Random r = ThreadLocalRandom.current();
-      for (int i = 0; i < state.size; i++) {
-        // unions are usually search/filter/select of multiple values of one dimension, so making bitmaps disjoint will
-        // make benchmarks closer to actual workloads
-        MutableBitmap bitmap = mutableBitmaps[r.nextInt(n)];
-        // In one selected bitmap, set the bit with probability=prob, to have the same fraction of set bit in the union
-        if (r.nextDouble() < prob) {
-          bitmap.add(i);
-        }
-      }
-      bitmaps = new ImmutableBitmap[n];
-      for (int i = 0; i < n; i++) {
-        bitmaps[i] = state.factory.makeImmutableBitmap(mutableBitmaps[i]);
-      }
-    }
-  }
-
-  /**
-   * Benchmark of cumulative cost of bitmap union with subsequent iteration over the result. This is a pattern from
-   * query processing on historical nodes, when filters like {@link io.druid.segment.filter.DimensionPredicateFilter},
-   * {@link io.druid.query.filter.RegexDimFilter}, {@link io.druid.query.filter.SearchQueryDimFilter} and similar are
-   * used.
-   */
-  @Benchmark
-  public int unionAndIter(BitmapsForUnion state)
-  {
-    ImmutableBitmap intersection = factory.union(Arrays.asList(state.bitmaps));
-    return iter(intersection);
-  }
-
-  /**
-   * This main() is for debugging from the IDE.
-   */
-  public static void main(String[] args)
-  {
-    BitmapIterationBenchmark state = new BitmapIterationBenchmark();
-    state.bitmapAlgo = "concise";
-    state.prob = 0.001;
-    state.size = 1000000;
-    state.setup();
-
-    BitmapsForIntersection state2 = new BitmapsForIntersection();
-    state2.setup(state);
-    state.intersectionAndIter(state2);
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java
deleted file mode 100644
index 390e28d8dfb..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java
+++ /dev/null
@@ -1,305 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.google.common.base.Function;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.FluentIterable;
-import io.druid.collections.bitmap.BitmapFactory;
-import io.druid.collections.bitmap.ImmutableBitmap;
-import io.druid.collections.bitmap.MutableBitmap;
-import io.druid.collections.bitmap.RoaringBitmapFactory;
-import io.druid.collections.spatial.ImmutableRTree;
-import io.druid.extendedset.intset.ConciseSetUtils;
-import io.druid.query.filter.BitmapIndexSelector;
-import io.druid.query.filter.BoundDimFilter;
-import io.druid.query.ordering.StringComparators;
-import io.druid.segment.column.BitmapIndex;
-import io.druid.segment.data.BitmapSerdeFactory;
-import io.druid.segment.data.GenericIndexed;
-import io.druid.segment.data.Indexed;
-import io.druid.segment.data.RoaringBitmapSerdeFactory;
-import io.druid.segment.filter.BoundFilter;
-import io.druid.segment.serde.BitmapIndexColumnPartSupplier;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 10)
-@Measurement(iterations = 10)
-public class BoundFilterBenchmark
-{
-  private static final int START_INT = 1_000_000_000;
-  private static final int END_INT = ConciseSetUtils.MAX_ALLOWED_INTEGER;
-
-  private static final BoundFilter NOTHING_LEXICOGRAPHIC = new BoundFilter(
-      new BoundDimFilter(
-          "foo",
-          String.valueOf(START_INT),
-          String.valueOf(START_INT),
-          true,
-          false,
-          false,
-          null,
-          StringComparators.LEXICOGRAPHIC
-      )
-  );
-
-  private static final BoundFilter HALF_LEXICOGRAPHIC = new BoundFilter(
-      new BoundDimFilter(
-          "foo",
-          String.valueOf(START_INT + (END_INT - START_INT) / 2),
-          String.valueOf(END_INT),
-          false,
-          false,
-          false,
-          null,
-          StringComparators.LEXICOGRAPHIC
-      )
-  );
-
-  private static final BoundFilter EVERYTHING_LEXICOGRAPHIC = new BoundFilter(
-      new BoundDimFilter(
-          "foo",
-          String.valueOf(START_INT),
-          String.valueOf(END_INT),
-          false,
-          false,
-          false,
-          null,
-          StringComparators.LEXICOGRAPHIC
-      )
-  );
-
-  private static final BoundFilter NOTHING_ALPHANUMERIC = new BoundFilter(
-      new BoundDimFilter(
-          "foo",
-          String.valueOf(START_INT),
-          String.valueOf(START_INT),
-          true,
-          false,
-          true,
-          null,
-          StringComparators.ALPHANUMERIC
-      )
-  );
-
-  private static final BoundFilter HALF_ALPHANUMERIC = new BoundFilter(
-      new BoundDimFilter(
-          "foo",
-          String.valueOf(START_INT + (END_INT - START_INT) / 2),
-          String.valueOf(END_INT),
-          false,
-          false,
-          true,
-          null,
-          StringComparators.ALPHANUMERIC
-      )
-  );
-
-  private static final BoundFilter EVERYTHING_ALPHANUMERIC = new BoundFilter(
-      new BoundDimFilter(
-          "foo",
-          String.valueOf(START_INT),
-          String.valueOf(END_INT),
-          false,
-          false,
-          true,
-          null,
-          StringComparators.ALPHANUMERIC
-      )
-  );
-
-  // cardinality, the dictionary will contain evenly spaced integers
-  @Param({"1000", "100000", "1000000"})
-  int cardinality;
-
-  int step;
-
-  // selector will contain a cardinality number of bitmaps; each one contains a single int: 0
-  BitmapIndexSelector selector;
-
-  @Setup
-  public void setup() throws IOException
-  {
-    step = (END_INT - START_INT) / cardinality;
-    final BitmapFactory bitmapFactory = new RoaringBitmapFactory();
-    final BitmapSerdeFactory serdeFactory = new RoaringBitmapSerdeFactory(null);
-    final List<Integer> ints = generateInts();
-    final GenericIndexed<String> dictionary = GenericIndexed.fromIterable(
-        FluentIterable.from(ints)
-                      .transform(
-                          new Function<Integer, String>()
-                          {
-                            @Override
-                            public String apply(Integer i)
-                            {
-                              return i.toString();
-                            }
-                          }
-                      ),
-        GenericIndexed.STRING_STRATEGY
-    );
-    final BitmapIndex bitmapIndex = new BitmapIndexColumnPartSupplier(
-        bitmapFactory,
-        GenericIndexed.fromIterable(
-            FluentIterable.from(ints)
-                          .transform(
-                              new Function<Integer, ImmutableBitmap>()
-                              {
-                                @Override
-                                public ImmutableBitmap apply(Integer i)
-                                {
-                                  final MutableBitmap mutableBitmap = bitmapFactory.makeEmptyMutableBitmap();
-                                  mutableBitmap.add((i - START_INT) / step);
-                                  return bitmapFactory.makeImmutableBitmap(mutableBitmap);
-                                }
-                              }
-                          ),
-            serdeFactory.getObjectStrategy()
-        ),
-        dictionary
-    ).get();
-    selector = new BitmapIndexSelector()
-    {
-      @Override
-      public Indexed<String> getDimensionValues(String dimension)
-      {
-        return dictionary;
-      }
-
-      @Override
-      public boolean hasMultipleValues(final String dimension)
-      {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public int getNumRows()
-      {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public BitmapFactory getBitmapFactory()
-      {
-        return bitmapFactory;
-      }
-
-      @Override
-      public ImmutableBitmap getBitmapIndex(String dimension, String value)
-      {
-        return bitmapIndex.getBitmap(bitmapIndex.getIndex(value));
-      }
-
-      @Override
-      public BitmapIndex getBitmapIndex(String dimension)
-      {
-        return bitmapIndex;
-      }
-
-      @Override
-      public ImmutableRTree getSpatialIndex(String dimension)
-      {
-        throw new UnsupportedOperationException();
-      }
-    };
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void matchNothingLexicographic()
-  {
-    final ImmutableBitmap bitmapIndex = NOTHING_LEXICOGRAPHIC.getBitmapIndex(selector);
-    Preconditions.checkState(bitmapIndex.size() == 0);
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void matchHalfLexicographic()
-  {
-    final ImmutableBitmap bitmapIndex = HALF_LEXICOGRAPHIC.getBitmapIndex(selector);
-    Preconditions.checkState(bitmapIndex.size() > 0 && bitmapIndex.size() < cardinality);
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void matchEverythingLexicographic()
-  {
-    final ImmutableBitmap bitmapIndex = EVERYTHING_LEXICOGRAPHIC.getBitmapIndex(selector);
-    Preconditions.checkState(bitmapIndex.size() == cardinality);
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void matchNothingAlphaNumeric()
-  {
-    final ImmutableBitmap bitmapIndex = NOTHING_ALPHANUMERIC.getBitmapIndex(selector);
-    Preconditions.checkState(bitmapIndex.size() == 0);
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void matchHalfAlphaNumeric()
-  {
-    final ImmutableBitmap bitmapIndex = HALF_ALPHANUMERIC.getBitmapIndex(selector);
-    Preconditions.checkState(bitmapIndex.size() > 0 && bitmapIndex.size() < cardinality);
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void matchEverythingAlphaNumeric()
-  {
-    final ImmutableBitmap bitmapIndex = EVERYTHING_ALPHANUMERIC.getBitmapIndex(selector);
-    Preconditions.checkState(bitmapIndex.size() == cardinality);
-  }
-
-  private List<Integer> generateInts()
-  {
-    final List<Integer> ints = new ArrayList<>(cardinality);
-
-    for (int i = 0; i < cardinality; i++) {
-      ints.add(START_INT + step * i);
-    }
-
-    return ints;
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/CompressedColumnarIntsBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/CompressedColumnarIntsBenchmark.java
deleted file mode 100644
index 23f6d61a71f..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/CompressedColumnarIntsBenchmark.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import io.druid.java.util.common.io.Closer;
-import io.druid.segment.data.ColumnarInts;
-import io.druid.segment.data.CompressedVSizeColumnarIntsSupplier;
-import io.druid.segment.data.CompressionStrategy;
-import io.druid.segment.data.IndexedInts;
-import io.druid.segment.data.VSizeColumnarInts;
-import io.druid.segment.data.WritableSupplier;
-import it.unimi.dsi.fastutil.ints.IntArrayList;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.channels.WritableByteChannel;
-import java.util.BitSet;
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-public class CompressedColumnarIntsBenchmark
-{
-  private IndexedInts uncompressed;
-  private IndexedInts compressed;
-
-  @Param({"1", "2", "3", "4"})
-  int bytes;
-
-  // Number of rows to read, the test will read random rows
-  @Param({"1000", "10000", "100000", "1000000", "1000000"})
-  int filteredRowCount;
-
-  private BitSet filter;
-
-  @Setup
-  public void setup() throws IOException
-  {
-    Random rand = new Random(0);
-    int[] vals = new int[0x100000];
-    final int bound = 1 << bytes;
-    for (int i = 0; i < vals.length; ++i) {
-      vals[i] = rand.nextInt(bound);
-    }
-    final ByteBuffer bufferCompressed = serialize(
-        CompressedVSizeColumnarIntsSupplier.fromList(
-            IntArrayList.wrap(vals),
-            bound - 1,
-            CompressedVSizeColumnarIntsSupplier.maxIntsInBufferForBytes(bytes),
-            ByteOrder.nativeOrder(),
-            CompressionStrategy.LZ4,
-            Closer.create()
-        )
-    );
-    this.compressed = CompressedVSizeColumnarIntsSupplier.fromByteBuffer(
-        bufferCompressed,
-        ByteOrder.nativeOrder()
-    ).get();
-
-    final ByteBuffer bufferUncompressed = serialize(VSizeColumnarInts.fromArray(vals));
-    this.uncompressed = VSizeColumnarInts.readFromByteBuffer(bufferUncompressed);
-
-    filter = new BitSet();
-    for (int i = 0; i < filteredRowCount; i++) {
-      int rowToAccess = rand.nextInt(vals.length);
-      // Skip already selected rows if any
-      while (filter.get(rowToAccess)) {
-        rowToAccess = (rowToAccess + 1) % vals.length;
-      }
-      filter.set(rowToAccess);
-    }
-
-  }
-
-  private static ByteBuffer serialize(WritableSupplier<ColumnarInts> writableSupplier) throws IOException
-  {
-    final ByteBuffer buffer = ByteBuffer.allocateDirect((int) writableSupplier.getSerializedSize());
-
-    WritableByteChannel channel = new WritableByteChannel()
-    {
-      @Override
-      public int write(ByteBuffer src) throws IOException
-      {
-        int size = src.remaining();
-        buffer.put(src);
-        return size;
-      }
-
-      @Override
-      public boolean isOpen()
-      {
-        return true;
-      }
-
-      @Override
-      public void close() throws IOException
-      {
-      }
-    };
-
-    writableSupplier.writeTo(channel, null);
-    buffer.rewind();
-    return buffer;
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void uncompressed(Blackhole blackhole)
-  {
-    for (int i = filter.nextSetBit(0); i >= 0; i = filter.nextSetBit(i + 1)) {
-      blackhole.consume(uncompressed.get(i));
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void compressed(Blackhole blackhole)
-  {
-    for (int i = filter.nextSetBit(0); i >= 0; i = filter.nextSetBit(i + 1)) {
-      blackhole.consume(compressed.get(i));
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java
deleted file mode 100644
index 958f3aa7d16..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import io.druid.java.util.common.io.Closer;
-import io.druid.segment.data.ColumnarInts;
-import io.druid.segment.data.ColumnarMultiInts;
-import io.druid.segment.data.CompressedVSizeColumnarMultiIntsSupplier;
-import io.druid.segment.data.CompressionStrategy;
-import io.druid.segment.data.IndexedInts;
-import io.druid.segment.data.VSizeColumnarInts;
-import io.druid.segment.data.VSizeColumnarMultiInts;
-import io.druid.segment.data.WritableSupplier;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.channels.WritableByteChannel;
-import java.util.BitSet;
-import java.util.List;
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-public class CompressedVSizeColumnarMultiIntsBenchmark
-{
-  private ColumnarMultiInts uncompressed;
-  private ColumnarMultiInts compressed;
-
-  @Param({"1", "2", "3", "4"})
-  int bytes;
-
-  @Param({"5", "10", "100", "1000"})
-  int valuesPerRowBound;
-
-  // Number of rows to read, the test will read random rows
-  @Param({"1000", "10000", "100000", "1000000", "1000000"})
-  int filteredRowCount;
-
-  private BitSet filter;
-
-  @Setup
-  public void setup() throws IOException
-  {
-    Random rand = new Random(0);
-    List<int[]> rows = Lists.newArrayList();
-    final int bound = 1 << bytes;
-    for (int i = 0; i < 0x100000; i++) {
-      int count = rand.nextInt(valuesPerRowBound) + 1;
-      int[] row = new int[rand.nextInt(count)];
-      for (int j = 0; j < row.length; j++) {
-        row[j] = rand.nextInt(bound);
-      }
-      rows.add(row);
-    }
-
-    final ByteBuffer bufferCompressed = serialize(
-        CompressedVSizeColumnarMultiIntsSupplier.fromIterable(
-            Iterables.transform(rows, (Function<int[], ColumnarInts>) input -> VSizeColumnarInts.fromArray(input, 20)),
-            bound - 1,
-            ByteOrder.nativeOrder(),
-            CompressionStrategy.LZ4,
-            Closer.create()
-        )
-    );
-    this.compressed = CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(
-        bufferCompressed,
-        ByteOrder.nativeOrder()
-    ).get();
-
-    final ByteBuffer bufferUncompressed = serialize(
-        VSizeColumnarMultiInts.fromIterable(Iterables.transform(rows, input -> VSizeColumnarInts.fromArray(input, 20)))
-    );
-    this.uncompressed = VSizeColumnarMultiInts.readFromByteBuffer(bufferUncompressed);
-
-    filter = new BitSet();
-    for (int i = 0; i < filteredRowCount; i++) {
-      int rowToAccess = rand.nextInt(rows.size());
-      // Skip already selected rows if any
-      while (filter.get(rowToAccess)) {
-        rowToAccess = (rowToAccess + 1) % rows.size();
-      }
-      filter.set(rowToAccess);
-    }
-  }
-
-  private static ByteBuffer serialize(WritableSupplier<ColumnarMultiInts> writableSupplier)
-      throws IOException
-  {
-    final ByteBuffer buffer = ByteBuffer.allocateDirect((int) writableSupplier.getSerializedSize());
-
-    WritableByteChannel channel = new WritableByteChannel()
-    {
-      @Override
-      public int write(ByteBuffer src) throws IOException
-      {
-        int size = src.remaining();
-        buffer.put(src);
-        return size;
-      }
-
-      @Override
-      public boolean isOpen()
-      {
-        return true;
-      }
-
-      @Override
-      public void close() throws IOException
-      {
-      }
-    };
-
-    writableSupplier.writeTo(channel, null);
-    buffer.rewind();
-    return buffer;
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void uncompressed(Blackhole blackhole)
-  {
-    for (int i = filter.nextSetBit(0); i >= 0; i = filter.nextSetBit(i + 1)) {
-      IndexedInts row = uncompressed.get(i);
-      for (int j = 0; j < row.size(); j++) {
-        blackhole.consume(row.get(j));
-      }
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void compressed(Blackhole blackhole)
-  {
-    for (int i = filter.nextSetBit(0); i >= 0; i = filter.nextSetBit(i + 1)) {
-      IndexedInts row = compressed.get(i);
-      for (int j = 0; j < row.size(); j++) {
-        blackhole.consume(row.get(j));
-      }
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/ConciseComplementBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ConciseComplementBenchmark.java
deleted file mode 100644
index a31a3f713f3..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/ConciseComplementBenchmark.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-
-import io.druid.extendedset.intset.ImmutableConciseSet;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-public class ConciseComplementBenchmark
-{
-
-  // Number of rows to read, the test will read random rows
-  @Param({"1000", "10000", "100000", "1000000", "1000000"})
-  int emptyRows;
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void uncompressed(Blackhole blackhole)
-  {
-    final ImmutableConciseSet set = ImmutableConciseSet.complement(null, emptyRows);
-    blackhole.consume(set);
-    assert (emptyRows == set.size());
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/ConsistentHasherBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ConsistentHasherBenchmark.java
deleted file mode 100644
index 639a3f518ba..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/ConsistentHasherBenchmark.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.google.common.collect.Sets;
-
-import io.druid.java.util.common.StringUtils;
-import io.druid.server.router.ConsistentHasher;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 15)
-@Measurement(iterations = 30)
-public class ConsistentHasherBenchmark
-{
-  @Param({"100000"})
-  int numIds;
-
-  ConsistentHasher hasher;
-  List<String> uuids;
-  Set<String> servers;
-
-  @Setup
-  public void setup() throws IOException
-  {
-    hasher = new ConsistentHasher(null);
-    uuids = new ArrayList<>();
-    servers = Sets.newHashSet(
-        "localhost:1",
-        "localhost:2",
-        "localhost:3",
-        "localhost:4",
-        "localhost:5",
-        "localhost:6",
-        "localhost:7",
-        "localhost:8",
-        "localhost:9",
-        "localhost:10"
-    );
-
-    for (int i = 0; i < numIds; i++) {
-      UUID uuid = UUID.randomUUID();
-      uuids.add(uuid.toString());
-    }
-
-    hasher.updateKeys(servers);
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void hash(Blackhole blackhole) throws Exception
-  {
-    for (String uuid : uuids) {
-      String server = hasher.findKey(StringUtils.toUtf8(uuid));
-      blackhole.consume(server);
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/DimensionPredicateFilterBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/DimensionPredicateFilterBenchmark.java
deleted file mode 100644
index cd87c2738c2..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/DimensionPredicateFilterBenchmark.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.google.common.base.Function;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
-import com.google.common.collect.FluentIterable;
-import io.druid.collections.bitmap.BitmapFactory;
-import io.druid.collections.bitmap.ImmutableBitmap;
-import io.druid.collections.bitmap.MutableBitmap;
-import io.druid.collections.bitmap.RoaringBitmapFactory;
-import io.druid.collections.spatial.ImmutableRTree;
-import io.druid.query.filter.BitmapIndexSelector;
-import io.druid.query.filter.DruidDoublePredicate;
-import io.druid.query.filter.DruidFloatPredicate;
-import io.druid.query.filter.DruidLongPredicate;
-import io.druid.query.filter.DruidPredicateFactory;
-import io.druid.segment.column.BitmapIndex;
-import io.druid.segment.data.BitmapSerdeFactory;
-import io.druid.segment.data.GenericIndexed;
-import io.druid.segment.data.Indexed;
-import io.druid.segment.data.RoaringBitmapSerdeFactory;
-import io.druid.segment.filter.DimensionPredicateFilter;
-import io.druid.segment.serde.BitmapIndexColumnPartSupplier;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 10)
-@Measurement(iterations = 10)
-public class DimensionPredicateFilterBenchmark
-{
-  private static final int START_INT = 1_000_000_000;
-
-  private static final DimensionPredicateFilter IS_EVEN = new DimensionPredicateFilter(
-      "foo",
-      new DruidPredicateFactory()
-      {
-        @Override
-        public Predicate<String> makeStringPredicate()
-        {
-          return new Predicate<String>()
-          {
-            @Override
-            public boolean apply(String input)
-            {
-              if (input == null) {
-                return false;
-              }
-              return Integer.parseInt(input.toString()) % 2 == 0;
-            }
-          };
-        }
-
-        @Override
-        public DruidLongPredicate makeLongPredicate()
-        {
-          return DruidLongPredicate.ALWAYS_FALSE;
-        }
-
-        @Override
-        public DruidFloatPredicate makeFloatPredicate()
-        {
-          return DruidFloatPredicate.ALWAYS_FALSE;
-        }
-
-        @Override
-        public DruidDoublePredicate makeDoublePredicate()
-        {
-          return DruidDoublePredicate.ALWAYS_FALSE;
-        }
-      },
-      null
-  );
-
-  // cardinality, the dictionary will contain integers starting from START_INT
-  @Param({"1000", "100000", "1000000"})
-  int cardinality;
-
-  // selector will contain a cardinality number of bitmaps; each one contains a single int: 0
-  BitmapIndexSelector selector;
-
-  @Setup
-  public void setup() throws IOException
-  {
-    final BitmapFactory bitmapFactory = new RoaringBitmapFactory();
-    final BitmapSerdeFactory serdeFactory = new RoaringBitmapSerdeFactory(null);
-    final List<Integer> ints = generateInts();
-    final GenericIndexed<String> dictionary = GenericIndexed.fromIterable(
-        FluentIterable.from(ints)
-                      .transform(
-                          new Function<Integer, String>()
-                          {
-                            @Override
-                            public String apply(Integer i)
-                            {
-                              return i.toString();
-                            }
-                          }
-                      ),
-        GenericIndexed.STRING_STRATEGY
-    );
-    final BitmapIndex bitmapIndex = new BitmapIndexColumnPartSupplier(
-        bitmapFactory,
-        GenericIndexed.fromIterable(
-            FluentIterable.from(ints)
-                          .transform(
-                              new Function<Integer, ImmutableBitmap>()
-                              {
-                                @Override
-                                public ImmutableBitmap apply(Integer i)
-                                {
-                                  final MutableBitmap mutableBitmap = bitmapFactory.makeEmptyMutableBitmap();
-                                  mutableBitmap.add(i - START_INT);
-                                  return bitmapFactory.makeImmutableBitmap(mutableBitmap);
-                                }
-                              }
-                          ),
-            serdeFactory.getObjectStrategy()
-        ),
-        dictionary
-    ).get();
-    selector = new BitmapIndexSelector()
-    {
-      @Override
-      public Indexed<String> getDimensionValues(String dimension)
-      {
-        return dictionary;
-      }
-
-      @Override
-      public boolean hasMultipleValues(final String dimension)
-      {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public int getNumRows()
-      {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public BitmapFactory getBitmapFactory()
-      {
-        return bitmapFactory;
-      }
-
-      @Override
-      public ImmutableBitmap getBitmapIndex(String dimension, String value)
-      {
-        return bitmapIndex.getBitmap(bitmapIndex.getIndex(value));
-      }
-
-      @Override
-      public BitmapIndex getBitmapIndex(String dimension)
-      {
-        return bitmapIndex;
-      }
-
-      @Override
-      public ImmutableRTree getSpatialIndex(String dimension)
-      {
-        throw new UnsupportedOperationException();
-      }
-    };
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void matchIsEven()
-  {
-    final ImmutableBitmap bitmapIndex = IS_EVEN.getBitmapIndex(selector);
-    Preconditions.checkState(bitmapIndex.size() == cardinality / 2);
-  }
-
-  private List<Integer> generateInts()
-  {
-    final List<Integer> ints = new ArrayList<>(cardinality);
-
-    for (int i = 0; i < cardinality; i++) {
-      ints.add(START_INT + i);
-    }
-
-    return ints;
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/ExpressionAggregationBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ExpressionAggregationBenchmark.java
deleted file mode 100644
index e09e7e0afd9..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/ExpressionAggregationBenchmark.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
-import io.druid.benchmark.datagen.BenchmarkColumnSchema;
-import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
-import io.druid.benchmark.datagen.SegmentGenerator;
-import io.druid.java.util.common.Intervals;
-import io.druid.java.util.common.granularity.Granularities;
-import io.druid.java.util.common.guava.Sequence;
-import io.druid.java.util.common.guava.Sequences;
-import io.druid.js.JavaScriptConfig;
-import io.druid.query.aggregation.BufferAggregator;
-import io.druid.query.aggregation.DoubleSumAggregatorFactory;
-import io.druid.query.aggregation.JavaScriptAggregatorFactory;
-import io.druid.query.expression.TestExprMacroTable;
-import io.druid.segment.BaseFloatColumnValueSelector;
-import io.druid.segment.ColumnSelectorFactory;
-import io.druid.segment.Cursor;
-import io.druid.segment.QueryableIndex;
-import io.druid.segment.QueryableIndexStorageAdapter;
-import io.druid.segment.VirtualColumns;
-import io.druid.segment.column.ValueType;
-import io.druid.timeline.DataSegment;
-import io.druid.timeline.partition.LinearShardSpec;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Level;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.TearDown;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.function.Function;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 15)
-@Measurement(iterations = 30)
-@BenchmarkMode(Mode.AverageTime)
-@OutputTimeUnit(TimeUnit.MILLISECONDS)
-public class ExpressionAggregationBenchmark
-{
-  @Param({"1000000"})
-  private int rowsPerSegment;
-
-  private SegmentGenerator segmentGenerator;
-  private QueryableIndex index;
-  private JavaScriptAggregatorFactory javaScriptAggregatorFactory;
-  private DoubleSumAggregatorFactory expressionAggregatorFactory;
-  private ByteBuffer aggregationBuffer = ByteBuffer.allocate(Double.BYTES);
-
-  @Setup(Level.Trial)
-  public void setup() throws Exception
-  {
-    final BenchmarkSchemaInfo schemaInfo = new BenchmarkSchemaInfo(
-        ImmutableList.of(
-            BenchmarkColumnSchema.makeNormal("x", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false),
-            BenchmarkColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false)
-        ),
-        ImmutableList.of(),
-        Intervals.of("2000/P1D"),
-        false
-    );
-
-    final DataSegment dataSegment = DataSegment.builder()
-                                               .dataSource("foo")
-                                               .interval(schemaInfo.getDataInterval())
-                                               .version("1")
-                                               .shardSpec(new LinearShardSpec(0))
-                                               .build();
-
-    this.segmentGenerator = new SegmentGenerator();
-    this.index = segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment);
-    this.javaScriptAggregatorFactory = new JavaScriptAggregatorFactory(
-        "name",
-        ImmutableList.of("x", "y"),
-        "function(current,x,y) { if (x > 0) { return current + x + 1 } else { return current + y + 1 } }",
-        "function() { return 0 }",
-        "function(a,b) { return a + b }",
-        JavaScriptConfig.getEnabledInstance()
-    );
-    this.expressionAggregatorFactory = new DoubleSumAggregatorFactory(
-        "name",
-        null,
-        "if(x>0,1.0+x,y+1)",
-        TestExprMacroTable.INSTANCE
-    );
-  }
-
-  @TearDown(Level.Trial)
-  public void tearDown() throws Exception
-  {
-    if (index != null) {
-      index.close();
-      index = null;
-    }
-
-    if (segmentGenerator != null) {
-      segmentGenerator.close();
-      segmentGenerator = null;
-    }
-  }
-
-  @Benchmark
-  public void queryUsingJavaScript(Blackhole blackhole) throws Exception
-  {
-    final Double result = compute(javaScriptAggregatorFactory::factorizeBuffered);
-    blackhole.consume(result);
-  }
-
-  @Benchmark
-  public void queryUsingExpression(Blackhole blackhole) throws Exception
-  {
-    final Double result = compute(expressionAggregatorFactory::factorizeBuffered);
-    blackhole.consume(result);
-  }
-
-  @Benchmark
-  public void queryUsingNative(Blackhole blackhole) throws Exception
-  {
-    final Double result = compute(
-        columnSelectorFactory ->
-            new NativeBufferAggregator(
-                columnSelectorFactory.makeColumnValueSelector("x"),
-                columnSelectorFactory.makeColumnValueSelector("y")
-            )
-    );
-    blackhole.consume(result);
-  }
-
-  private double compute(final Function<ColumnSelectorFactory, BufferAggregator> aggregatorFactory)
-  {
-    final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(index);
-
-    final Sequence<Cursor> cursors = adapter.makeCursors(
-        null,
-        index.getDataInterval(),
-        VirtualColumns.EMPTY,
-        Granularities.ALL,
-        false,
-        null
-    );
-
-    final List<Double> results = Sequences.toList(
-        Sequences.map(
-            cursors,
-            cursor -> {
-              final BufferAggregator bufferAggregator = aggregatorFactory.apply(cursor.getColumnSelectorFactory());
-              bufferAggregator.init(aggregationBuffer, 0);
-
-              while (!cursor.isDone()) {
-                bufferAggregator.aggregate(aggregationBuffer, 0);
-                cursor.advance();
-              }
-
-              final Double dbl = (Double) bufferAggregator.get(aggregationBuffer, 0);
-              bufferAggregator.close();
-              return dbl;
-            }
-        ),
-        new ArrayList<>()
-    );
-
-    return Iterables.getOnlyElement(results);
-  }
-
-  private static class NativeBufferAggregator implements BufferAggregator
-  {
-    private final BaseFloatColumnValueSelector xSelector;
-    private final BaseFloatColumnValueSelector ySelector;
-
-    public NativeBufferAggregator(
-        final BaseFloatColumnValueSelector xSelector,
-        final BaseFloatColumnValueSelector ySelector
-    )
-    {
-      this.xSelector = xSelector;
-      this.ySelector = ySelector;
-    }
-
-    @Override
-    public void init(final ByteBuffer buf, final int position)
-    {
-      buf.putDouble(0, 0d);
-    }
-
-    @Override
-    public void aggregate(final ByteBuffer buf, final int position)
-    {
-      final float x = xSelector.getFloat();
-      final double n = x > 0 ? x + 1 : ySelector.getFloat() + 1;
-      buf.putDouble(0, buf.getDouble(position) + n);
-    }
-
-    @Override
-    public Object get(final ByteBuffer buf, final int position)
-    {
-      return buf.getDouble(position);
-    }
-
-    @Override
-    public float getFloat(final ByteBuffer buf, final int position)
-    {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public long getLong(final ByteBuffer buf, final int position)
-    {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public double getDouble(ByteBuffer buf, int position)
-    {
-      throw new UnsupportedOperationException();
-    }
-    @Override
-    public void close()
-    {
-
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/ExpressionSelectorBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ExpressionSelectorBenchmark.java
deleted file mode 100644
index f46b56b4eff..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/ExpressionSelectorBenchmark.java
+++ /dev/null
@@ -1,374 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.google.common.collect.ImmutableList;
-import io.druid.benchmark.datagen.BenchmarkColumnSchema;
-import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
-import io.druid.benchmark.datagen.SegmentGenerator;
-import io.druid.java.util.common.Intervals;
-import io.druid.java.util.common.granularity.Granularities;
-import io.druid.java.util.common.guava.Sequence;
-import io.druid.java.util.common.guava.Sequences;
-import io.druid.query.dimension.DefaultDimensionSpec;
-import io.druid.query.dimension.ExtractionDimensionSpec;
-import io.druid.query.expression.TestExprMacroTable;
-import io.druid.query.extraction.StrlenExtractionFn;
-import io.druid.query.extraction.TimeFormatExtractionFn;
-import io.druid.segment.ColumnValueSelector;
-import io.druid.segment.Cursor;
-import io.druid.segment.DimensionSelector;
-import io.druid.segment.QueryableIndex;
-import io.druid.segment.QueryableIndexStorageAdapter;
-import io.druid.segment.VirtualColumns;
-import io.druid.segment.column.Column;
-import io.druid.segment.column.ValueType;
-import io.druid.segment.virtual.ExpressionVirtualColumn;
-import io.druid.timeline.DataSegment;
-import io.druid.timeline.partition.LinearShardSpec;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Level;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.TearDown;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 15)
-@Measurement(iterations = 30)
-@BenchmarkMode(Mode.AverageTime)
-@OutputTimeUnit(TimeUnit.MILLISECONDS)
-public class ExpressionSelectorBenchmark
-{
-  @Param({"1000000"})
-  private int rowsPerSegment;
-
-  private SegmentGenerator segmentGenerator;
-  private QueryableIndex index;
-
-  @Setup(Level.Trial)
-  public void setup() throws Exception
-  {
-    final BenchmarkSchemaInfo schemaInfo = new BenchmarkSchemaInfo(
-        ImmutableList.of(
-            BenchmarkColumnSchema.makeNormal("n", ValueType.LONG, false, 1, 0d, 0d, 10000d, false),
-            BenchmarkColumnSchema.makeZipf(
-                "s",
-                ValueType.STRING,
-                false,
-                1,
-                0d,
-                1000,
-                10000,
-                3d
-            )
-        ),
-        ImmutableList.of(),
-        Intervals.of("2000/P1D"),
-        false
-    );
-
-    final DataSegment dataSegment = DataSegment.builder()
-                                               .dataSource("foo")
-                                               .interval(schemaInfo.getDataInterval())
-                                               .version("1")
-                                               .shardSpec(new LinearShardSpec(0))
-                                               .build();
-
-    this.segmentGenerator = new SegmentGenerator();
-    this.index = segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, rowsPerSegment);
-  }
-
-  @TearDown(Level.Trial)
-  public void tearDown() throws Exception
-  {
-    if (index != null) {
-      index.close();
-      index = null;
-    }
-
-    if (segmentGenerator != null) {
-      segmentGenerator.close();
-      segmentGenerator = null;
-    }
-  }
-
-  @Benchmark
-  public void timeFloorUsingExpression(Blackhole blackhole) throws Exception
-  {
-    final Sequence<Cursor> cursors = new QueryableIndexStorageAdapter(index).makeCursors(
-        null,
-        index.getDataInterval(),
-        VirtualColumns.create(
-            ImmutableList.of(
-                new ExpressionVirtualColumn(
-                    "v",
-                    "timestamp_floor(__time, 'PT1H')",
-                    ValueType.LONG,
-                    TestExprMacroTable.INSTANCE
-                )
-            )
-        ),
-        Granularities.ALL,
-        false,
-        null
-    );
-
-    final List<?> results = Sequences.toList(
-        Sequences.map(
-            cursors,
-            cursor -> {
-              final ColumnValueSelector selector = cursor.getColumnSelectorFactory().makeColumnValueSelector("v");
-              while (!cursor.isDone()) {
-                blackhole.consume(selector.getLong());
-                cursor.advance();
-              }
-              return null;
-            }
-        ),
-        new ArrayList<>()
-    );
-
-    blackhole.consume(results);
-  }
-
-  @Benchmark
-  public void timeFloorUsingExtractionFn(Blackhole blackhole) throws Exception
-  {
-    final Sequence<Cursor> cursors = new QueryableIndexStorageAdapter(index).makeCursors(
-        null,
-        index.getDataInterval(),
-        VirtualColumns.EMPTY,
-        Granularities.ALL,
-        false,
-        null
-    );
-
-    final List<?> results = Sequences.toList(
-        Sequences.map(
-            cursors,
-            cursor -> {
-              final DimensionSelector selector = cursor
-                  .getColumnSelectorFactory()
-                  .makeDimensionSelector(
-                      new ExtractionDimensionSpec(
-                          Column.TIME_COLUMN_NAME,
-                          "v",
-                          new TimeFormatExtractionFn(null, null, null, Granularities.HOUR, true)
-                      )
-                  );
-
-              consumeDimension(cursor, selector, blackhole);
-              return null;
-            }
-        ),
-        new ArrayList<>()
-    );
-
-    blackhole.consume(results);
-  }
-
-  @Benchmark
-  public void timeFloorUsingCursor(Blackhole blackhole) throws Exception
-  {
-    final Sequence<Cursor> cursors = new QueryableIndexStorageAdapter(index).makeCursors(
-        null,
-        index.getDataInterval(),
-        VirtualColumns.EMPTY,
-        Granularities.HOUR,
-        false,
-        null
-    );
-
-    final List<Long> results = Sequences.toList(
-        Sequences.map(
-            cursors,
-            cursor -> {
-              long count = 0L;
-              while (!cursor.isDone()) {
-                count++;
-                cursor.advance();
-              }
-              return count;
-            }
-        ),
-        new ArrayList<>()
-    );
-
-    long count = 0L;
-    for (Long result : results) {
-      count += result;
-    }
-
-    blackhole.consume(count);
-  }
-
-  @Benchmark
-  public void strlenUsingExpressionAsLong(Blackhole blackhole) throws Exception
-  {
-    final Sequence<Cursor> cursors = new QueryableIndexStorageAdapter(index).makeCursors(
-        null,
-        index.getDataInterval(),
-        VirtualColumns.create(
-            ImmutableList.of(
-                new ExpressionVirtualColumn(
-                    "v",
-                    "strlen(s)",
-                    ValueType.STRING,
-                    TestExprMacroTable.INSTANCE
-                )
-            )
-        ),
-        Granularities.ALL,
-        false,
-        null
-    );
-
-    final List<?> results = Sequences.toList(
-        Sequences.map(
-            cursors,
-            cursor -> {
-              final ColumnValueSelector selector = cursor.getColumnSelectorFactory().makeColumnValueSelector("v");
-              consumeLong(cursor, selector, blackhole);
-              return null;
-            }
-        ),
-        new ArrayList<>()
-    );
-
-    blackhole.consume(results);
-  }
-
-  @Benchmark
-  public void strlenUsingExpressionAsString(Blackhole blackhole) throws Exception
-  {
-    final Sequence<Cursor> cursors = new QueryableIndexStorageAdapter(index).makeCursors(
-        null,
-        index.getDataInterval(),
-        VirtualColumns.create(
-            ImmutableList.of(
-                new ExpressionVirtualColumn(
-                    "v",
-                    "strlen(s)",
-                    ValueType.STRING,
-                    TestExprMacroTable.INSTANCE
-                )
-            )
-        ),
-        Granularities.ALL,
-        false,
-        null
-    );
-
-    final List<?> results = Sequences.toList(
-        Sequences.map(
-            cursors,
-            cursor -> {
-              final DimensionSelector selector = cursor.getColumnSelectorFactory().makeDimensionSelector(
-                  new DefaultDimensionSpec("v", "v", ValueType.STRING)
-              );
-
-              consumeDimension(cursor, selector, blackhole);
-              return null;
-            }
-        ),
-        new ArrayList<>()
-    );
-
-    blackhole.consume(results);
-  }
-
-  @Benchmark
-  public void strlenUsingExtractionFn(Blackhole blackhole) throws Exception
-  {
-    final Sequence<Cursor> cursors = new QueryableIndexStorageAdapter(index).makeCursors(
-        null,
-        index.getDataInterval(),
-        VirtualColumns.EMPTY,
-        Granularities.ALL,
-        false,
-        null
-    );
-
-    final List<?> results = Sequences.toList(
-        Sequences.map(
-            cursors,
-            cursor -> {
-              final DimensionSelector selector = cursor
-                  .getColumnSelectorFactory()
-                  .makeDimensionSelector(new ExtractionDimensionSpec("x", "v", StrlenExtractionFn.instance()));
-
-              consumeDimension(cursor, selector, blackhole);
-              return null;
-            }
-        ),
-        new ArrayList<>()
-    );
-
-    blackhole.consume(results);
-  }
-
-  private void consumeDimension(final Cursor cursor, final DimensionSelector selector, final Blackhole blackhole)
-  {
-    if (selector.getValueCardinality() >= 0) {
-      // Read all IDs and then lookup all names.
-      final BitSet values = new BitSet();
-
-      while (!cursor.isDone()) {
-        final int value = selector.getRow().get(0);
-        values.set(value);
-        cursor.advance();
-      }
-
-      for (int i = values.nextSetBit(0); i >= 0; i = values.nextSetBit(i + 1)) {
-        blackhole.consume(selector.lookupName(i));
-      }
-    } else {
-      // Lookup names as we go.
-      while (!cursor.isDone()) {
-        final int value = selector.getRow().get(0);
-        blackhole.consume(selector.lookupName(value));
-        cursor.advance();
-      }
-    }
-  }
-
-  private void consumeLong(final Cursor cursor, final ColumnValueSelector selector, final Blackhole blackhole)
-  {
-    while (!cursor.isDone()) {
-      blackhole.consume(selector.getLong());
-      cursor.advance();
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
deleted file mode 100644
index b409b9cb6d5..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
+++ /dev/null
@@ -1,654 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Function;
-import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-import com.google.common.io.Files;
-import io.druid.benchmark.datagen.BenchmarkDataGenerator;
-import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
-import io.druid.benchmark.datagen.BenchmarkSchemas;
-import io.druid.data.input.InputRow;
-import io.druid.hll.HyperLogLogHash;
-import io.druid.jackson.DefaultObjectMapper;
-import io.druid.java.util.common.granularity.Granularities;
-import io.druid.java.util.common.guava.Sequence;
-import io.druid.java.util.common.guava.Sequences;
-import io.druid.java.util.common.logger.Logger;
-import io.druid.js.JavaScriptConfig;
-import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
-import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
-import io.druid.query.dimension.DefaultDimensionSpec;
-import io.druid.query.extraction.ExtractionFn;
-import io.druid.query.extraction.JavaScriptExtractionFn;
-import io.druid.query.filter.AndDimFilter;
-import io.druid.query.filter.BitmapIndexSelector;
-import io.druid.query.filter.BoundDimFilter;
-import io.druid.query.filter.DimFilter;
-import io.druid.query.filter.DruidDoublePredicate;
-import io.druid.query.filter.DruidFloatPredicate;
-import io.druid.query.filter.DruidLongPredicate;
-import io.druid.query.filter.DruidPredicateFactory;
-import io.druid.query.filter.Filter;
-import io.druid.query.filter.OrDimFilter;
-import io.druid.query.filter.SelectorDimFilter;
-import io.druid.query.ordering.StringComparators;
-import io.druid.segment.BaseLongColumnValueSelector;
-import io.druid.segment.Cursor;
-import io.druid.segment.DimensionSelector;
-import io.druid.segment.IndexIO;
-import io.druid.segment.IndexMergerV9;
-import io.druid.segment.IndexSpec;
-import io.druid.segment.QueryableIndex;
-import io.druid.segment.QueryableIndexStorageAdapter;
-import io.druid.segment.StorageAdapter;
-import io.druid.segment.VirtualColumns;
-import io.druid.segment.column.Column;
-import io.druid.segment.column.ColumnConfig;
-import io.druid.segment.data.IndexedInts;
-import io.druid.segment.filter.AndFilter;
-import io.druid.segment.filter.BoundFilter;
-import io.druid.segment.filter.DimensionPredicateFilter;
-import io.druid.segment.filter.Filters;
-import io.druid.segment.filter.OrFilter;
-import io.druid.segment.filter.SelectorFilter;
-import io.druid.segment.incremental.IncrementalIndex;
-import io.druid.segment.serde.ComplexMetrics;
-import org.apache.commons.io.FileUtils;
-import org.joda.time.Interval;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.TearDown;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 10)
-@Measurement(iterations = 25)
-public class FilterPartitionBenchmark
-{
-  @Param({"750000"})
-  private int rowsPerSegment;
-
-  @Param({"basic"})
-  private String schema;
-
-  private static final Logger log = new Logger(FilterPartitionBenchmark.class);
-  private static final int RNG_SEED = 9999;
-  private static final IndexMergerV9 INDEX_MERGER_V9;
-  private static final IndexIO INDEX_IO;
-  public static final ObjectMapper JSON_MAPPER;
-  private IncrementalIndex incIndex;
-  private QueryableIndex qIndex;
-  private File indexFile;
-  private File tmpDir;
-
-  private Filter timeFilterNone;
-  private Filter timeFilterHalf;
-  private Filter timeFilterAll;
-
-  private BenchmarkSchemaInfo schemaInfo;
-
-  private static String JS_FN = "function(str) { return 'super-' + str; }";
-  private static ExtractionFn JS_EXTRACTION_FN = new JavaScriptExtractionFn(JS_FN, false, JavaScriptConfig.getEnabledInstance());
-
-  static {
-    JSON_MAPPER = new DefaultObjectMapper();
-    INDEX_IO = new IndexIO(
-        JSON_MAPPER,
-        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
-        new ColumnConfig()
-        {
-          @Override
-          public int columnCacheSizeBytes()
-          {
-            return 0;
-          }
-        }
-    );
-    INDEX_MERGER_V9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
-  }
-
-  @Setup
-  public void setup() throws IOException
-  {
-    log.info("SETUP CALLED AT " + System.currentTimeMillis());
-
-    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
-      ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
-    }
-
-    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schema);
-
-    BenchmarkDataGenerator gen = new BenchmarkDataGenerator(
-        schemaInfo.getColumnSchemas(),
-        RNG_SEED,
-        schemaInfo.getDataInterval(),
-        rowsPerSegment
-    );
-
-    incIndex = makeIncIndex();
-
-    for (int j = 0; j < rowsPerSegment; j++) {
-      InputRow row = gen.nextRow();
-      if (j % 10000 == 0) {
-        log.info(j + " rows generated.");
-      }
-      incIndex.add(row);
-    }
-
-    tmpDir = Files.createTempDir();
-    log.info("Using temp dir: " + tmpDir.getAbsolutePath());
-
-    indexFile = INDEX_MERGER_V9.persist(
-        incIndex,
-        tmpDir,
-        new IndexSpec(),
-        null
-    );
-    qIndex = INDEX_IO.loadIndex(indexFile);
-
-    Interval interval = schemaInfo.getDataInterval();
-    timeFilterNone = new BoundFilter(new BoundDimFilter(
-        Column.TIME_COLUMN_NAME,
-        String.valueOf(Long.MAX_VALUE),
-        String.valueOf(Long.MAX_VALUE),
-        true,
-        true,
-        null,
-        null,
-        StringComparators.ALPHANUMERIC
-    ));
-
-    long halfEnd = (interval.getEndMillis() + interval.getStartMillis()) / 2;
-    timeFilterHalf = new BoundFilter(new BoundDimFilter(
-        Column.TIME_COLUMN_NAME,
-        String.valueOf(interval.getStartMillis()),
-        String.valueOf(halfEnd),
-        true,
-        true,
-        null,
-        null,
-        StringComparators.ALPHANUMERIC
-    ));
-
-    timeFilterAll = new BoundFilter(new BoundDimFilter(
-        Column.TIME_COLUMN_NAME,
-        String.valueOf(interval.getStartMillis()),
-        String.valueOf(interval.getEndMillis()),
-        true,
-        true,
-        null,
-        null,
-        StringComparators.ALPHANUMERIC
-    ));
-  }
-
-  @TearDown
-  public void tearDown() throws IOException
-  {
-    FileUtils.deleteDirectory(tmpDir);
-  }
-
-  private IncrementalIndex makeIncIndex()
-  {
-    return new IncrementalIndex.Builder()
-        .setSimpleTestingIndexSchema(schemaInfo.getAggsArray())
-        .setReportParseExceptions(false)
-        .setMaxRowCount(rowsPerSegment)
-        .buildOnheap();
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void stringRead(Blackhole blackhole) throws Exception
-  {
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, null);
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void longRead(Blackhole blackhole) throws Exception
-  {
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, null);
-
-    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
-    List<Long> strings = Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList()).get(0);
-    for (Long st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void timeFilterNone(Blackhole blackhole) throws Exception
-  {
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, timeFilterNone);
-
-    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
-    List<Long> strings = Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList()).get(0);
-    for (Long st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void timeFilterHalf(Blackhole blackhole) throws Exception
-  {
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, timeFilterHalf);
-
-    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
-    List<Long> strings = Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList()).get(0);
-    for (Long st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void timeFilterAll(Blackhole blackhole) throws Exception
-  {
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, timeFilterAll);
-
-    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
-    List<Long> strings = Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList()).get(0);
-    for (Long st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void readWithPreFilter(Blackhole blackhole) throws Exception
-  {
-    Filter filter = new SelectorFilter("dimSequential", "199");
-
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, filter);
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void readWithPostFilter(Blackhole blackhole) throws Exception
-  {
-    Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
-
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, filter);
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void readWithExFnPreFilter(Blackhole blackhole) throws Exception
-  {
-    Filter filter = new SelectorDimFilter("dimSequential", "super-199", JS_EXTRACTION_FN).toFilter();
-
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, filter);
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void readWithExFnPostFilter(Blackhole blackhole) throws Exception
-  {
-    Filter filter = new NoBitmapSelectorDimFilter("dimSequential", "super-199", JS_EXTRACTION_FN).toFilter();
-
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, filter);
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void readOrFilter(Blackhole blackhole) throws Exception
-  {
-    Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
-    Filter filter2 = new AndFilter(Arrays.<Filter>asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar")));
-    Filter orFilter = new OrFilter(Arrays.<Filter>asList(filter, filter2));
-
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, orFilter);
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void readOrFilterCNF(Blackhole blackhole) throws Exception
-  {
-    Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
-    Filter filter2 = new AndFilter(Arrays.<Filter>asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar")));
-    Filter orFilter = new OrFilter(Arrays.<Filter>asList(filter, filter2));
-
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, Filters.convertToCNF(orFilter));
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void readComplexOrFilter(Blackhole blackhole) throws Exception
-  {
-    DimFilter dimFilter1 = new OrDimFilter(Arrays.<DimFilter>asList(
-        new SelectorDimFilter("dimSequential", "199", null),
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null),
-            new SelectorDimFilter("dimMultivalEnumerated", "Bar", null)
-        )
-        ))
-    );
-    DimFilter dimFilter2 = new OrDimFilter(Arrays.<DimFilter>asList(
-        new SelectorDimFilter("dimSequential", "299", null),
-        new SelectorDimFilter("dimSequential", "399", null),
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null),
-            new SelectorDimFilter("dimMultivalEnumerated", "Foo", null)
-        )
-        ))
-    );
-    DimFilter dimFilter3 = new OrDimFilter(Arrays.<DimFilter>asList(
-        dimFilter1,
-        dimFilter2,
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null),
-            new SelectorDimFilter("dimMultivalEnumerated", "World", null)
-        )
-        ))
-    );
-
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, dimFilter3.toFilter());
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void readComplexOrFilterCNF(Blackhole blackhole) throws Exception
-  {
-    DimFilter dimFilter1 = new OrDimFilter(Arrays.<DimFilter>asList(
-        new SelectorDimFilter("dimSequential", "199", null),
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null),
-            new SelectorDimFilter("dimMultivalEnumerated", "Bar", null)
-        )
-        ))
-    );
-    DimFilter dimFilter2 = new OrDimFilter(Arrays.<DimFilter>asList(
-        new SelectorDimFilter("dimSequential", "299", null),
-        new SelectorDimFilter("dimSequential", "399", null),
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null),
-            new SelectorDimFilter("dimMultivalEnumerated", "Foo", null)
-        )
-        ))
-    );
-    DimFilter dimFilter3 = new OrDimFilter(Arrays.<DimFilter>asList(
-        dimFilter1,
-        dimFilter2,
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null),
-            new SelectorDimFilter("dimMultivalEnumerated", "World", null)
-        )
-        ))
-    );
-
-    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
-    Sequence<Cursor> cursors = makeCursors(sa, Filters.convertToCNF(dimFilter3.toFilter()));
-
-    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
-    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
-    for (String st : strings) {
-      blackhole.consume(st);
-    }
-  }
-
-  private Sequence<Cursor> makeCursors(StorageAdapter sa, Filter filter)
-  {
-    return sa.makeCursors(filter, schemaInfo.getDataInterval(), VirtualColumns.EMPTY, Granularities.ALL, false, null);
-  }
-
-  private Sequence<List<String>> readCursors(Sequence<Cursor> cursors, final Blackhole blackhole)
-  {
-    return Sequences.map(
-        cursors,
-        new Function<Cursor, List<String>>()
-        {
-          @Override
-          public List<String> apply(Cursor input)
-          {
-            List<String> strings = new ArrayList<String>();
-            List<DimensionSelector> selectors = new ArrayList<>();
-            selectors.add(
-                input.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null))
-            );
-            //selectors.add(input.makeDimensionSelector(new DefaultDimensionSpec("dimB", null)));
-            while (!input.isDone()) {
-              for (DimensionSelector selector : selectors) {
-                IndexedInts row = selector.getRow();
-                blackhole.consume(selector.lookupName(row.get(0)));
-                //strings.add(selector.lookupName(row.get(0)));
-              }
-              input.advance();
-            }
-            return strings;
-          }
-        }
-    );
-  }
-
-  private Sequence<List<Long>> readCursorsLong(Sequence<Cursor> cursors, final Blackhole blackhole)
-  {
-    return Sequences.map(
-        cursors,
-        new Function<Cursor, List<Long>>()
-        {
-          @Override
-          public List<Long> apply(Cursor input)
-          {
-            List<Long> longvals = new ArrayList<Long>();
-            BaseLongColumnValueSelector selector = input.getColumnSelectorFactory().makeColumnValueSelector("sumLongSequential");
-            while (!input.isDone()) {
-              long rowval = selector.getLong();
-              blackhole.consume(rowval);
-              input.advance();
-            }
-            return longvals;
-          }
-        }
-    );
-  }
-
-  private static class NoBitmapSelectorFilter extends SelectorFilter
-  {
-    public NoBitmapSelectorFilter(
-        String dimension,
-        String value
-    )
-    {
-      super(dimension, value);
-    }
-
-    @Override
-    public boolean supportsBitmapIndex(BitmapIndexSelector selector)
-    {
-      return false;
-    }
-  }
-
-  private static class NoBitmapDimensionPredicateFilter extends DimensionPredicateFilter
-  {
-    public NoBitmapDimensionPredicateFilter(
-        final String dimension,
-        final DruidPredicateFactory predicateFactory,
-        final ExtractionFn extractionFn
-    )
-    {
-      super(dimension, predicateFactory, extractionFn);
-    }
-
-    @Override
-    public boolean supportsBitmapIndex(BitmapIndexSelector selector)
-    {
-      return false;
-    }
-  }
-
-  private static class NoBitmapSelectorDimFilter extends SelectorDimFilter
-  {
-    public NoBitmapSelectorDimFilter(
-        String dimension,
-        String value,
-        ExtractionFn extractionFn
-    )
-    {
-      super(dimension, value, extractionFn);
-    }
-    @Override
-    public Filter toFilter()
-    {
-      ExtractionFn extractionFn = getExtractionFn();
-      String dimension = getDimension();
-      final String value = getValue();
-      if (extractionFn == null) {
-        return new NoBitmapSelectorFilter(dimension, value);
-      } else {
-        final String valueOrNull = Strings.emptyToNull(value);
-
-        final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
-        {
-          @Override
-          public Predicate<String> makeStringPredicate()
-          {
-            return new Predicate<String>()
-            {
-              @Override
-              public boolean apply(String input)
-              {
-                return Objects.equals(valueOrNull, input);
-              }
-            };
-          }
-
-          @Override
-          public DruidLongPredicate makeLongPredicate()
-          {
-            return DruidLongPredicate.ALWAYS_FALSE;
-          }
-
-          @Override
-          public DruidFloatPredicate makeFloatPredicate()
-          {
-            return DruidFloatPredicate.ALWAYS_FALSE;
-          }
-
-          @Override
-          public DruidDoublePredicate makeDoublePredicate()
-          {
-            return DruidDoublePredicate.ALWAYS_FALSE;
-          }
-        };
-
-        return new NoBitmapDimensionPredicateFilter(dimension, predicateFactory, extractionFn);
-      }
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java
deleted file mode 100644
index 4b9422ae6cc..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java
+++ /dev/null
@@ -1,298 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.io.Files;
-import io.druid.benchmark.datagen.BenchmarkDataGenerator;
-import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
-import io.druid.benchmark.datagen.BenchmarkSchemas;
-import io.druid.benchmark.query.QueryBenchmarkUtil;
-import io.druid.data.input.InputRow;
-import io.druid.hll.HyperLogLogHash;
-import io.druid.jackson.DefaultObjectMapper;
-import io.druid.java.util.common.granularity.Granularities;
-import io.druid.java.util.common.guava.Sequence;
-import io.druid.java.util.common.guava.Sequences;
-import io.druid.java.util.common.logger.Logger;
-import io.druid.js.JavaScriptConfig;
-import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
-import io.druid.query.Druids;
-import io.druid.query.FinalizeResultsQueryRunner;
-import io.druid.query.Query;
-import io.druid.query.QueryPlus;
-import io.druid.query.QueryRunner;
-import io.druid.query.QueryRunnerFactory;
-import io.druid.query.QueryToolChest;
-import io.druid.query.Result;
-import io.druid.query.aggregation.AggregatorFactory;
-import io.druid.query.aggregation.CountAggregatorFactory;
-import io.druid.query.aggregation.FilteredAggregatorFactory;
-import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
-import io.druid.query.filter.BoundDimFilter;
-import io.druid.query.filter.DimFilter;
-import io.druid.query.filter.InDimFilter;
-import io.druid.query.filter.JavaScriptDimFilter;
-import io.druid.query.filter.OrDimFilter;
-import io.druid.query.filter.RegexDimFilter;
-import io.druid.query.filter.SearchQueryDimFilter;
-import io.druid.query.ordering.StringComparators;
-import io.druid.query.search.ContainsSearchQuerySpec;
-import io.druid.query.spec.MultipleIntervalSegmentSpec;
-import io.druid.query.spec.QuerySegmentSpec;
-import io.druid.query.timeseries.TimeseriesQuery;
-import io.druid.query.timeseries.TimeseriesQueryEngine;
-import io.druid.query.timeseries.TimeseriesQueryQueryToolChest;
-import io.druid.query.timeseries.TimeseriesQueryRunnerFactory;
-import io.druid.query.timeseries.TimeseriesResultValue;
-import io.druid.segment.IncrementalIndexSegment;
-import io.druid.segment.IndexIO;
-import io.druid.segment.IndexMergerV9;
-import io.druid.segment.IndexSpec;
-import io.druid.segment.QueryableIndex;
-import io.druid.segment.QueryableIndexSegment;
-import io.druid.segment.column.ColumnConfig;
-import io.druid.segment.incremental.IncrementalIndex;
-import io.druid.segment.serde.ComplexMetrics;
-import org.apache.commons.io.FileUtils;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.TearDown;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 10)
-@Measurement(iterations = 25)
-public class FilteredAggregatorBenchmark
-{
-  @Param({"75000"})
-  private int rowsPerSegment;
-
-  @Param({"basic"})
-  private String schema;
-
-  private static final Logger log = new Logger(FilteredAggregatorBenchmark.class);
-  private static final int RNG_SEED = 9999;
-  private static final IndexMergerV9 INDEX_MERGER_V9;
-  private static final IndexIO INDEX_IO;
-  public static final ObjectMapper JSON_MAPPER;
-  private IncrementalIndex incIndex;
-  private IncrementalIndex incIndexFilteredAgg;
-  private AggregatorFactory[] filteredMetrics;
-  private QueryableIndex qIndex;
-  private File indexFile;
-  private DimFilter filter;
-  private List<InputRow> inputRows;
-  private QueryRunnerFactory factory;
-  private BenchmarkSchemaInfo schemaInfo;
-  private TimeseriesQuery query;
-  private File tmpDir;
-
-  static {
-    JSON_MAPPER = new DefaultObjectMapper();
-    INDEX_IO = new IndexIO(
-        JSON_MAPPER,
-        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
-        new ColumnConfig()
-        {
-          @Override
-          public int columnCacheSizeBytes()
-          {
-            return 0;
-          }
-        }
-    );
-    INDEX_MERGER_V9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
-  }
-
-  @Setup
-  public void setup() throws IOException
-  {
-    log.info("SETUP CALLED AT " + System.currentTimeMillis());
-
-    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
-      ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
-    }
-
-    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schema);
-
-    BenchmarkDataGenerator gen = new BenchmarkDataGenerator(
-        schemaInfo.getColumnSchemas(),
-        RNG_SEED,
-        schemaInfo.getDataInterval(),
-        rowsPerSegment
-    );
-
-    incIndex = makeIncIndex(schemaInfo.getAggsArray());
-
-    filter = new OrDimFilter(
-        Arrays.asList(
-            new BoundDimFilter("dimSequential", "-1", "-1", true, true, null, null, StringComparators.ALPHANUMERIC),
-            new JavaScriptDimFilter(
-                "dimSequential",
-                "function(x) { return false }",
-                null,
-                JavaScriptConfig.getEnabledInstance()
-            ),
-            new RegexDimFilter("dimSequential", "X", null),
-            new SearchQueryDimFilter("dimSequential", new ContainsSearchQuerySpec("X", false), null),
-            new InDimFilter("dimSequential", Collections.singletonList("X"), null)
-        )
-    );
-    filteredMetrics = new AggregatorFactory[1];
-    filteredMetrics[0] = new FilteredAggregatorFactory(new CountAggregatorFactory("rows"), filter);
-    incIndexFilteredAgg = makeIncIndex(filteredMetrics);
-
-    inputRows = new ArrayList<>();
-    for (int j = 0; j < rowsPerSegment; j++) {
-      InputRow row = gen.nextRow();
-      if (j % 10000 == 0) {
-        log.info(j + " rows generated.");
-      }
-      incIndex.add(row);
-      inputRows.add(row);
-    }
-
-    tmpDir = Files.createTempDir();
-    log.info("Using temp dir: " + tmpDir.getAbsolutePath());
-
-    indexFile = INDEX_MERGER_V9.persist(
-        incIndex,
-        tmpDir,
-        new IndexSpec(),
-        null
-    );
-    qIndex = INDEX_IO.loadIndex(indexFile);
-
-    factory = new TimeseriesQueryRunnerFactory(
-        new TimeseriesQueryQueryToolChest(
-            QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()
-        ),
-        new TimeseriesQueryEngine(),
-        QueryBenchmarkUtil.NOOP_QUERYWATCHER
-    );
-
-    BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");
-    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
-    List<AggregatorFactory> queryAggs = new ArrayList<>();
-    queryAggs.add(filteredMetrics[0]);
-
-    query = Druids.newTimeseriesQueryBuilder()
-                  .dataSource("blah")
-                  .granularity(Granularities.ALL)
-                  .intervals(intervalSpec)
-                  .aggregators(queryAggs)
-                  .descending(false)
-                  .build();
-  }
-
-  @TearDown
-  public void tearDown() throws IOException
-  {
-    FileUtils.deleteDirectory(tmpDir);
-  }
-
-  private IncrementalIndex makeIncIndex(AggregatorFactory[] metrics)
-  {
-    return new IncrementalIndex.Builder()
-        .setSimpleTestingIndexSchema(metrics)
-        .setReportParseExceptions(false)
-        .setMaxRowCount(rowsPerSegment)
-        .buildOnheap();
-  }
-
-  private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
-  {
-    QueryToolChest toolChest = factory.getToolchest();
-    QueryRunner<T> theRunner = new FinalizeResultsQueryRunner<>(
-        toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)),
-        toolChest
-    );
-
-    Sequence<T> queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
-    return Sequences.toList(queryResult, Lists.<T>newArrayList());
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void ingest(Blackhole blackhole) throws Exception
-  {
-    incIndexFilteredAgg = makeIncIndex(filteredMetrics);
-    for (InputRow row : inputRows) {
-      int rv = incIndexFilteredAgg.add(row);
-      blackhole.consume(rv);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "incIndex",
-        new IncrementalIndexSegment(incIndex, "incIndex")
-    );
-
-    List<Result<TimeseriesResultValue>> results = FilteredAggregatorBenchmark.runQuery(factory, runner, query);
-    for (Result<TimeseriesResultValue> result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndex(Blackhole blackhole) throws Exception
-  {
-    final QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", qIndex)
-    );
-
-    List<Result<TimeseriesResultValue>> results = FilteredAggregatorBenchmark.runQuery(factory, runner, query);
-    for (Result<TimeseriesResultValue> result : results) {
-      blackhole.consume(result);
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmark.java
deleted file mode 100644
index e241295582b..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmark.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import io.druid.java.util.common.parsers.Parser;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-import org.openjdk.jmh.runner.Runner;
-import org.openjdk.jmh.runner.RunnerException;
-import org.openjdk.jmh.runner.options.Options;
-import org.openjdk.jmh.runner.options.OptionsBuilder;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@BenchmarkMode(Mode.AverageTime)
-@Warmup(iterations = 10)
-@Measurement(iterations = 25)
-@Fork(value = 1)
-public class FlattenJSONBenchmark
-{
-  private static final int numEvents = 100000;
-
-  List<String> flatInputs;
-  List<String> nestedInputs;
-  List<String> jqInputs;
-  Parser flatParser;
-  Parser nestedParser;
-  Parser jqParser;
-  Parser fieldDiscoveryParser;
-  Parser forcedPathParser;
-  int flatCounter = 0;
-  int nestedCounter = 0;
-  int jqCounter = 0;
-
-  @Setup
-  public void prepare() throws Exception
-  {
-    FlattenJSONBenchmarkUtil gen = new FlattenJSONBenchmarkUtil();
-    flatInputs = new ArrayList<String>();
-    for (int i = 0; i < numEvents; i++) {
-      flatInputs.add(gen.generateFlatEvent());
-    }
-    nestedInputs = new ArrayList<String>();
-    for (int i = 0; i < numEvents; i++) {
-      nestedInputs.add(gen.generateNestedEvent());
-    }
-    jqInputs = new ArrayList<String>();
-    for (int i = 0; i < numEvents; i++) {
-      jqInputs.add(gen.generateNestedEvent()); // reuse the same event as "nested"
-    }
-
-    flatParser = gen.getFlatParser();
-    nestedParser = gen.getNestedParser();
-    jqParser = gen.getJqParser();
-    fieldDiscoveryParser = gen.getFieldDiscoveryParser();
-    forcedPathParser = gen.getForcedPathParser();
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public Map<String, Object> baseline(final Blackhole blackhole)
-  {
-    Map<String, Object> parsed = flatParser.parseToMap(flatInputs.get(flatCounter));
-    for (String s : parsed.keySet()) {
-      blackhole.consume(parsed.get(s));
-    }
-    flatCounter = (flatCounter + 1) % numEvents;
-    return parsed;
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public Map<String, Object> flatten(final Blackhole blackhole)
-  {
-    Map<String, Object> parsed = nestedParser.parseToMap(nestedInputs.get(nestedCounter));
-    for (String s : parsed.keySet()) {
-      blackhole.consume(parsed.get(s));
-    }
-    nestedCounter = (nestedCounter + 1) % numEvents;
-    return parsed;
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public Map<String, Object> jqflatten(final Blackhole blackhole)
-  {
-    Map<String, Object> parsed = jqParser.parseToMap(jqInputs.get(jqCounter));
-    for (String s : parsed.keySet()) {
-      blackhole.consume(parsed.get(s));
-    }
-    jqCounter = (jqCounter + 1) % numEvents;
-    return parsed;
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public Map<String, Object> preflattenNestedParser(final Blackhole blackhole)
-  {
-    Map<String, Object> parsed = fieldDiscoveryParser.parseToMap(flatInputs.get(nestedCounter));
-    for (String s : parsed.keySet()) {
-      blackhole.consume(parsed.get(s));
-    }
-    nestedCounter = (nestedCounter + 1) % numEvents;
-    return parsed;
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public Map<String, Object> forcedRootPaths(final Blackhole blackhole)
-  {
-    Map<String, Object> parsed = forcedPathParser.parseToMap(flatInputs.get(nestedCounter));
-    for (String s : parsed.keySet()) {
-      blackhole.consume(parsed.get(s));
-    }
-    nestedCounter = (nestedCounter + 1) % numEvents;
-    return parsed;
-  }
-
-  public static void main(String[] args) throws RunnerException
-  {
-    Options opt = new OptionsBuilder()
-        .include(FlattenJSONBenchmark.class.getSimpleName())
-        .build();
-
-    new Runner(opt).run();
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmarkUtil.java b/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmarkUtil.java
deleted file mode 100644
index 41ba357a869..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmarkUtil.java
+++ /dev/null
@@ -1,500 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.PropertyAccessor;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.github.wnameless.json.flattener.JsonFlattener;
-import io.druid.data.input.impl.DimensionsSpec;
-import io.druid.data.input.impl.JSONParseSpec;
-import io.druid.data.input.impl.TimestampSpec;
-import io.druid.jackson.DefaultObjectMapper;
-import io.druid.java.util.common.parsers.JSONPathFieldSpec;
-import io.druid.java.util.common.parsers.JSONPathSpec;
-import io.druid.java.util.common.parsers.Parser;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
-public class FlattenJSONBenchmarkUtil
-{
-  private Random rng;
-  private final ObjectMapper mapper = new DefaultObjectMapper();
-  private static final String DEFAULT_TIMESTAMP = "2015-09-12T12:10:53.155Z";
-
-  public FlattenJSONBenchmarkUtil()
-  {
-    this.rng = new Random(9999);
-    mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.PUBLIC_ONLY);
-    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
-  }
-
-  public Parser getFlatParser()
-  {
-    JSONParseSpec spec = new JSONParseSpec(
-        new TimestampSpec("ts", "iso", null),
-        new DimensionsSpec(null, null, null),
-        null,
-        null
-    );
-    return spec.makeParser();
-  }
-
-  public Parser getFieldDiscoveryParser()
-  {
-    List<JSONPathFieldSpec> fields = new ArrayList<>();
-    JSONPathSpec flattenSpec = new JSONPathSpec(true, fields);
-
-    JSONParseSpec spec = new JSONParseSpec(
-        new TimestampSpec("ts", "iso", null),
-        new DimensionsSpec(null, null, null),
-        flattenSpec,
-        null
-    );
-
-    return spec.makeParser();
-  }
-
-  public Parser getNestedParser()
-  {
-    List<JSONPathFieldSpec> fields = new ArrayList<>();
-    fields.add(JSONPathFieldSpec.createRootField("ts"));
-
-    fields.add(JSONPathFieldSpec.createRootField("d1"));
-    //fields.add(JSONPathFieldSpec.createRootField("d2"));
-    fields.add(JSONPathFieldSpec.createNestedField("e1.d1", "$.e1.d1"));
-    fields.add(JSONPathFieldSpec.createNestedField("e1.d2", "$.e1.d2"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.d3", "$.e2.d3"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.d4", "$.e2.d4"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.d5", "$.e2.d5"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.d6", "$.e2.d6"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[0]", "$.e2.ad1[0]"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[1]", "$.e2.ad1[1]"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[2]", "$.e2.ad1[2]"));
-    fields.add(JSONPathFieldSpec.createNestedField("ae1[0].d1", "$.ae1[0].d1"));
-    fields.add(JSONPathFieldSpec.createNestedField("ae1[1].d1", "$.ae1[1].d1"));
-    fields.add(JSONPathFieldSpec.createNestedField("ae1[2].e1.d2", "$.ae1[2].e1.d2"));
-
-    fields.add(JSONPathFieldSpec.createRootField("m3"));
-    //fields.add(JSONPathFieldSpec.createRootField("m4"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.m1", "$.e3.m1"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.m2", "$.e3.m2"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.m3", "$.e3.m3"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.m4", "$.e3.m4"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.am1[0]", "$.e3.am1[0]"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.am1[1]", "$.e3.am1[1]"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.am1[2]", "$.e3.am1[2]"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.am1[3]", "$.e3.am1[3]"));
-    fields.add(JSONPathFieldSpec.createNestedField("e4.e4.m4", "$.e4.e4.m4"));
-
-    JSONPathSpec flattenSpec = new JSONPathSpec(true, fields);
-    JSONParseSpec spec = new JSONParseSpec(
-        new TimestampSpec("ts", "iso", null),
-        new DimensionsSpec(null, null, null),
-        flattenSpec,
-        null
-    );
-
-    return spec.makeParser();
-  }
-
-  public Parser getForcedPathParser()
-  {
-    List<JSONPathFieldSpec> fields = new ArrayList<>();
-    fields.add(JSONPathFieldSpec.createNestedField("ts", "$['ts']"));
-
-    fields.add(JSONPathFieldSpec.createNestedField("d1", "$['d1']"));
-    fields.add(JSONPathFieldSpec.createNestedField("d2", "$['d2']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e1.d1", "$['e1.d1']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e1.d2", "$['e1.d2']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.d3", "$['e2.d3']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.d4", "$['e2.d4']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.d5", "$['e2.d5']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.d6", "$['e2.d6']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[0]", "$['e2.ad1[0]']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[1]", "$['e2.ad1[1]']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[2]", "$['e2.ad1[2]']"));
-    fields.add(JSONPathFieldSpec.createNestedField("ae1[0].d1", "$['ae1[0].d1']"));
-    fields.add(JSONPathFieldSpec.createNestedField("ae1[1].d1", "$['ae1[1].d1']"));
-    fields.add(JSONPathFieldSpec.createNestedField("ae1[2].e1.d2", "$['ae1[2].e1.d2']"));
-
-    fields.add(JSONPathFieldSpec.createNestedField("m3", "$['m3']"));
-    fields.add(JSONPathFieldSpec.createNestedField("m4", "$['m4']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.m1", "$['e3.m1']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.m2", "$['e3.m2']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.m3", "$['e3.m3']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.m4", "$['e3.m4']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.am1[0]", "$['e3.am1[0]']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.am1[1]", "$['e3.am1[1]']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.am1[2]", "$['e3.am1[2]']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e3.am1[3]", "$['e3.am1[3]']"));
-    fields.add(JSONPathFieldSpec.createNestedField("e4.e4.m4", "$['e4.e4.m4']"));
-
-    JSONPathSpec flattenSpec = new JSONPathSpec(false, fields);
-    JSONParseSpec spec = new JSONParseSpec(
-        new TimestampSpec("ts", "iso", null),
-        new DimensionsSpec(null, null, null),
-        flattenSpec,
-        null
-    );
-
-    return spec.makeParser();
-  }
-
-  public Parser getJqParser()
-  {
-    List<JSONPathFieldSpec> fields = new ArrayList<>();
-    fields.add(JSONPathFieldSpec.createRootField("ts"));
-
-    fields.add(JSONPathFieldSpec.createRootField("d1"));
-    fields.add(JSONPathFieldSpec.createJqField("e1.d1", ".e1.d1"));
-    fields.add(JSONPathFieldSpec.createJqField("e1.d2", ".e1.d2"));
-    fields.add(JSONPathFieldSpec.createJqField("e2.d3", ".e2.d3"));
-    fields.add(JSONPathFieldSpec.createJqField("e2.d4", ".e2.d4"));
-    fields.add(JSONPathFieldSpec.createJqField("e2.d5", ".e2.d5"));
-    fields.add(JSONPathFieldSpec.createJqField("e2.d6", ".e2.d6"));
-    fields.add(JSONPathFieldSpec.createJqField("e2.ad1[0]", ".e2.ad1[0]"));
-    fields.add(JSONPathFieldSpec.createJqField("e2.ad1[1]", ".e2.ad1[1]"));
-    fields.add(JSONPathFieldSpec.createJqField("e2.ad1[2]", ".e2.ad1[2]"));
-    fields.add(JSONPathFieldSpec.createJqField("ae1[0].d1", ".ae1[0].d1"));
-    fields.add(JSONPathFieldSpec.createJqField("ae1[1].d1", ".ae1[1].d1"));
-    fields.add(JSONPathFieldSpec.createJqField("ae1[2].e1.d2", ".ae1[2].e1.d2"));
-
-    fields.add(JSONPathFieldSpec.createRootField("m3"));
-    fields.add(JSONPathFieldSpec.createJqField("e3.m1", ".e3.m1"));
-    fields.add(JSONPathFieldSpec.createJqField("e3.m2", ".e3.m2"));
-    fields.add(JSONPathFieldSpec.createJqField("e3.m3", ".e3.m3"));
-    fields.add(JSONPathFieldSpec.createJqField("e3.m4", ".e3.m4"));
-    fields.add(JSONPathFieldSpec.createJqField("e3.am1[0]", ".e3.am1[0]"));
-    fields.add(JSONPathFieldSpec.createJqField("e3.am1[1]", ".e3.am1[1]"));
-    fields.add(JSONPathFieldSpec.createJqField("e3.am1[2]", ".e3.am1[2]"));
-    fields.add(JSONPathFieldSpec.createJqField("e3.am1[3]", ".e3.am1[3]"));
-    fields.add(JSONPathFieldSpec.createJqField("e4.e4.m4", ".e4.e4.m4"));
-
-    JSONPathSpec flattenSpec = new JSONPathSpec(true, fields);
-    JSONParseSpec spec = new JSONParseSpec(
-        new TimestampSpec("ts", "iso", null),
-        new DimensionsSpec(null, null, null),
-        flattenSpec,
-        null
-    );
-
-    return spec.makeParser();
-  }
-
-  public String generateFlatEvent() throws Exception
-  {
-    String nestedEvent = generateNestedEvent();
-    String flatEvent = JsonFlattener.flatten(nestedEvent);
-    return flatEvent;
-  }
-
-  /*
-  e.g.,
-
-  {
-  "d1":"-889954295",
-  "d2":"-1724267856",
-  "m3":0.1429096312550323,
-  "m4":-7491190942271782800,
-  "e1":{"d1":"2044704643",
-        "d2":"743384585"},
-  "e2":{"d3":"1879234327",
-        "d4":"1248394579",
-        "d5":"-639742676",
-        "d6":"1334864967",
-        "ad1":["-684042233","-1368392605","1826364033"]},
-  "e3":{"m1":1026394465228315487,
-        "m2":0.27737174619459004,
-        "m3":0.011921350960908628,
-        "m4":-7507319256575520484,
-        "am1":[-2383262648875933574,-3980663171371801209,-8225906222712163481,6074309311406287835]},
-  "e4":{"e4":{"m4":32836881083689842}},
-  "ae1":[{"d1":"-1797792200"},{"d1":"142582995"},{"e1":{"d2":"-1341994709"}}],
-  "ts":"2015-09-12T12:10:53.155Z"
-  }
-  */
-  public String generateNestedEvent() throws Exception
-  {
-    BenchmarkEvent nestedDims1 = new BenchmarkEvent(
-        null,
-        String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), null, null, null, null,
-        null, null, null, null,
-        null, null, null, null,
-        null, null, null
-    );
-
-    String[] dimsArray1 = {String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt())};
-    BenchmarkEvent nestedDims2 = new BenchmarkEvent(
-        null,
-        null, null, String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()),
-        null, null, null, null,
-        null, null, null, null,
-        dimsArray1, null, null
-    );
-
-    Long[] metricsArray1 = {rng.nextLong(), rng.nextLong(), rng.nextLong(), rng.nextLong()};
-    BenchmarkEvent nestedMetrics1 = new BenchmarkEvent(
-        null,
-        null, null, null, null, null, null,
-        rng.nextLong(), rng.nextDouble(), rng.nextDouble(), rng.nextLong(),
-        null, null, null, null,
-        null, metricsArray1, null
-    );
-
-    BenchmarkEvent nestedMetrics2 = new BenchmarkEvent(
-        null,
-        null, null, null, null, null, null,
-        null, null, null, rng.nextLong(),
-        null, null, null, null,
-        null, null, null
-    );
-
-    BenchmarkEvent metricsWrapper = new BenchmarkEvent(
-        null,
-        null, null, null, null, null, null,
-        null, null, null, null,
-        null, null, null, nestedMetrics2,
-        null, null, null
-    );
-
-    //nest some dimensions in an array!
-    BenchmarkEvent arrayNestedDim1 = new BenchmarkEvent(
-        null,
-        String.valueOf(rng.nextInt()), null, null, null, null, null,
-        null, null, null, null,
-        null, null, null, null,
-        null, null, null
-    );
-    BenchmarkEvent arrayNestedDim2 = new BenchmarkEvent(
-        null,
-        String.valueOf(rng.nextInt()), null, null, null, null, null,
-        null, null, null, null,
-        null, null, null, null,
-        null, null, null
-    );
-    BenchmarkEvent arrayNestedDim3 = new BenchmarkEvent(
-        null,
-        null, String.valueOf(rng.nextInt()), null, null, null, null,
-        null, null, null, null,
-        null, null, null, null,
-        null, null, null
-    );
-    BenchmarkEvent arrayNestedWrapper = new BenchmarkEvent(
-        null,
-        null, null, null, null, null, null,
-        null, null, null, null,
-        arrayNestedDim3, null, null, null,
-        null, null, null
-    );
-    BenchmarkEvent[] eventArray = {arrayNestedDim1, arrayNestedDim2, arrayNestedWrapper};
-
-    Long[] ignoredMetrics = {Long.valueOf(10), Long.valueOf(20), Long.valueOf(30)};
-
-    BenchmarkEvent wrapper = new BenchmarkEvent(
-        DEFAULT_TIMESTAMP,
-        String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), null, null, null, null,
-        null, null, rng.nextDouble(), rng.nextLong(),
-        nestedDims1, nestedDims2, nestedMetrics1, metricsWrapper,
-        null, ignoredMetrics, eventArray
-    );
-
-    return mapper.writeValueAsString(wrapper);
-  }
-
-  public static class BenchmarkEvent
-  {
-
-    public String ts;
-
-    @JsonProperty
-    public String getTs()
-    {
-      return ts;
-    }
-
-    @JsonProperty
-    public String getD1()
-    {
-      return d1;
-    }
-
-    @JsonProperty
-    public String getD2()
-    {
-      return d2;
-    }
-
-    @JsonProperty
-    public String getD3()
-    {
-      return d3;
-    }
-
-    @JsonProperty
-    public String getD4()
-    {
-      return d4;
-    }
-
-    @JsonProperty
-    public String getD5()
-    {
-      return d5;
-    }
-
-    @JsonProperty
-    public String getD6()
-    {
-      return d6;
-    }
-
-    @JsonProperty
-    public Long getM1()
-    {
-      return m1;
-    }
-
-    @JsonProperty
-    public Double getM2()
-    {
-      return m2;
-    }
-
-    @JsonProperty
-    public Double getM3()
-    {
-      return m3;
-    }
-
-    @JsonProperty
-    public Long getM4()
-    {
-      return m4;
-    }
-
-    @JsonProperty
-    public BenchmarkEvent getE1()
-    {
-      return e1;
-    }
-
-    @JsonProperty
-    public BenchmarkEvent getE2()
-    {
-      return e2;
-    }
-
-    @JsonProperty
-    public BenchmarkEvent getE3()
-    {
-      return e3;
-    }
-
-    @JsonProperty
-    public BenchmarkEvent getE4()
-    {
-      return e4;
-    }
-
-    @JsonProperty
-    public String[] getAd1()
-    {
-      return ad1;
-    }
-
-    @JsonProperty
-    public Long[] getAm1()
-    {
-      return am1;
-    }
-
-    @JsonProperty
-    public BenchmarkEvent[] getAe1()
-    {
-      return ae1;
-    }
-
-    public String d1;
-    public String d2;
-    public String d3;
-    public String d4;
-    public String d5;
-    public String d6;
-    public Long m1;
-    public Double m2;
-    public Double m3;
-    public Long m4;
-    public BenchmarkEvent e1;
-    public BenchmarkEvent e2;
-    public BenchmarkEvent e3;
-    public BenchmarkEvent e4;
-    public String[] ad1;
-    public Long[] am1;
-    public BenchmarkEvent[] ae1;
-
-    @JsonCreator
-    public BenchmarkEvent(
-        @JsonProperty("ts") String ts,
-        @JsonProperty("d1") String d1,
-        @JsonProperty("d2") String d2,
-        @JsonProperty("d3") String d3,
-        @JsonProperty("d4") String d4,
-        @JsonProperty("d5") String d5,
-        @JsonProperty("d6") String d6,
-        @JsonProperty("m1") Long m1,
-        @JsonProperty("m2") Double m2,
-        @JsonProperty("m3") Double m3,
-        @JsonProperty("m4") Long m4,
-        @JsonProperty("e1") BenchmarkEvent e1,
-        @JsonProperty("e2") BenchmarkEvent e2,
-        @JsonProperty("e3") BenchmarkEvent e3,
-        @JsonProperty("e4") BenchmarkEvent e4,
-        @JsonProperty("ad1") String[] ad1,
-        @JsonProperty("am1") Long[] am1,
-        @JsonProperty("ae1") BenchmarkEvent[] ae1
-    )
-    {
-      this.ts = ts;
-      this.d1 = d1;
-      this.d2 = d2;
-      this.d3 = d3;
-      this.d4 = d4;
-      this.d5 = d5;
-      this.d6 = d6;
-      this.m1 = m1;
-      this.m2 = m2;
-      this.m3 = m3;
-      this.m4 = m4;
-      this.e1 = e1;
-      this.e2 = e2;
-      this.e3 = e3;
-      this.e4 = e4;
-      this.ad1 = ad1;
-      this.am1 = am1;
-      this.ae1 = ae1;
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmark.java
deleted file mode 100644
index 3292bd85e4e..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmark.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-// Run FloatCompressionBenchmarkFileGenerator to generate the required files before running this benchmark
-
-import com.google.common.base.Supplier;
-import com.google.common.io.Files;
-import io.druid.segment.data.CompressedColumnarFloatsSupplier;
-import io.druid.segment.data.ColumnarFloats;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 10)
-@Measurement(iterations = 25)
-@BenchmarkMode(Mode.AverageTime)
-@OutputTimeUnit(TimeUnit.MILLISECONDS)
-public class FloatCompressionBenchmark
-{
-  @Param("floatCompress/")
-  private static String dirPath;
-
-  @Param({"enumerate", "zipfLow", "zipfHigh", "sequential", "uniform"})
-  private static String file;
-
-  @Param({"lz4", "none"})
-  private static String strategy;
-
-  private Random rand;
-  private Supplier<ColumnarFloats> supplier;
-
-  @Setup
-  public void setup() throws Exception
-  {
-    File dir = new File(dirPath);
-    File compFile = new File(dir, file + "-" + strategy);
-    rand = new Random();
-    ByteBuffer buffer = Files.map(compFile);
-    supplier = CompressedColumnarFloatsSupplier.fromByteBuffer(buffer, ByteOrder.nativeOrder());
-  }
-
-  @Benchmark
-  public void readContinuous(Blackhole bh) throws IOException
-  {
-    ColumnarFloats columnarFloats = supplier.get();
-    int count = columnarFloats.size();
-    float sum = 0;
-    for (int i = 0; i < count; i++) {
-      sum += columnarFloats.get(i);
-    }
-    bh.consume(sum);
-    columnarFloats.close();
-  }
-
-  @Benchmark
-  public void readSkipping(Blackhole bh) throws IOException
-  {
-    ColumnarFloats columnarFloats = supplier.get();
-    int count = columnarFloats.size();
-    float sum = 0;
-    for (int i = 0; i < count; i += rand.nextInt(2000)) {
-      sum += columnarFloats.get(i);
-    }
-    bh.consume(sum);
-    columnarFloats.close();
-  }
-
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
deleted file mode 100644
index 864dfbf19aa..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.google.common.collect.ImmutableList;
-import io.druid.benchmark.datagen.BenchmarkColumnSchema;
-import io.druid.benchmark.datagen.BenchmarkColumnValueGenerator;
-import io.druid.java.util.common.logger.Logger;
-import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
-import io.druid.segment.column.ValueType;
-import io.druid.segment.data.CompressionFactory;
-import io.druid.segment.data.CompressionStrategy;
-import io.druid.segment.data.ColumnarFloatsSerializer;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.io.Writer;
-import java.net.URISyntaxException;
-import java.nio.ByteOrder;
-import java.nio.channels.FileChannel;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.StandardOpenOption;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class FloatCompressionBenchmarkFileGenerator
-{
-  private static final Logger log = new Logger(FloatCompressionBenchmarkFileGenerator.class);
-  public static final int ROW_NUM = 5000000;
-  public static final List<CompressionStrategy> compressions =
-      ImmutableList.of(
-          CompressionStrategy.LZ4,
-          CompressionStrategy.NONE
-      );
-
-  private static String dirPath = "floatCompress/";
-
-  public static void main(String[] args) throws IOException, URISyntaxException
-  {
-    if (args.length >= 1) {
-      dirPath = args[0];
-    }
-
-    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.FLOAT, true, 1, 0d,
-                                                                                  ImmutableList.<Object>of(
-                                                                                      0f,
-                                                                                      1.1f,
-                                                                                      2.2f,
-                                                                                      3.3f,
-                                                                                      4.4f
-                                                                                  ),
-                                                                                  ImmutableList.of(
-                                                                                      0.95,
-                                                                                      0.001,
-                                                                                      0.0189,
-                                                                                      0.03,
-                                                                                      0.0001
-                                                                                  )
-    );
-    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf(
-        "",
-        ValueType.FLOAT,
-        true,
-        1,
-        0d,
-        -1,
-        1000,
-        1d
-    );
-    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf(
-        "",
-        ValueType.FLOAT,
-        true,
-        1,
-        0d,
-        -1,
-        1000,
-        3d
-    );
-    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential(
-        "",
-        ValueType.FLOAT,
-        true,
-        1,
-        0d,
-        1470187671,
-        2000000000
-    );
-    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeContinuousUniform(
-        "",
-        ValueType.FLOAT,
-        true,
-        1,
-        0d,
-        0,
-        1000
-    );
-
-    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
-    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
-    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
-    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
-    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
-    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));
-
-    File dir = new File(dirPath);
-    dir.mkdir();
-
-    // create data files using BenchmarkColunValueGenerator
-    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
-      final File dataFile = new File(dir, entry.getKey());
-      dataFile.delete();
-      try (Writer writer = Files.newBufferedWriter(dataFile.toPath(), StandardCharsets.UTF_8)) {
-        for (int i = 0; i < ROW_NUM; i++) {
-          writer.write((Float) entry.getValue().generateRowValue() + "\n");
-        }
-      }
-    }
-
-    // create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
-    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
-      for (CompressionStrategy compression : compressions) {
-        String name = entry.getKey() + "-" + compression.toString();
-        log.info("%s: ", name);
-        File compFile = new File(dir, name);
-        compFile.delete();
-        File dataFile = new File(dir, entry.getKey());
-
-        ColumnarFloatsSerializer writer = CompressionFactory.getFloatSerializer(
-            new OffHeapMemorySegmentWriteOutMedium(),
-            "float",
-            ByteOrder.nativeOrder(),
-            compression
-        );
-        try (
-            BufferedReader br = Files.newBufferedReader(dataFile.toPath(), StandardCharsets.UTF_8);
-            FileChannel output =
-                FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)
-        ) {
-          writer.open();
-          String line;
-          while ((line = br.readLine()) != null) {
-            writer.add(Float.parseFloat(line));
-          }
-          writer.writeTo(output, null);
-        }
-        log.info("%d", compFile.length() / 1024);
-      }
-    }
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/GenericIndexedBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/GenericIndexedBenchmark.java
deleted file mode 100644
index 2d9a60c559e..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/GenericIndexedBenchmark.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.google.common.io.Files;
-import com.google.common.primitives.Ints;
-import io.druid.java.util.common.io.smoosh.FileSmoosher;
-import io.druid.java.util.common.io.smoosh.SmooshedFileMapper;
-import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
-import io.druid.segment.data.GenericIndexed;
-import io.druid.segment.data.GenericIndexedWriter;
-import io.druid.segment.data.ObjectStrategy;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Level;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OperationsPerInvocation;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.MappedByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.file.StandardOpenOption;
-import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.TimeUnit;
-
-@BenchmarkMode(Mode.AverageTime)
-@OutputTimeUnit(TimeUnit.NANOSECONDS)
-@OperationsPerInvocation(GenericIndexedBenchmark.ITERATIONS)
-@Warmup(iterations = 5)
-@Measurement(iterations = 20)
-@Fork(1)
-@State(Scope.Benchmark)
-public class GenericIndexedBenchmark
-{
-  public static final int ITERATIONS = 10000;
-
-  static final ObjectStrategy<byte[]> byteArrayStrategy = new ObjectStrategy<byte[]>()
-  {
-    @Override
-    public Class<? extends byte[]> getClazz()
-    {
-      return byte[].class;
-    }
-
-    @Override
-    public byte[] fromByteBuffer(ByteBuffer buffer, int numBytes)
-    {
-      byte[] result = new byte[numBytes];
-      buffer.get(result);
-      return result;
-    }
-
-    @Override
-    public byte[] toBytes(byte[] val)
-    {
-      return val;
-    }
-
-    @Override
-    public int compare(byte[] o1, byte[] o2)
-    {
-      return Integer.compare(Ints.fromByteArray(o1), Ints.fromByteArray(o2));
-    }
-  };
-
-  @Param({"10000"})
-  public int n;
-  @Param({"8"})
-  public int elementSize;
-
-  private File file;
-  private File smooshDir;
-  private GenericIndexed<byte[]> genericIndexed;
-  private int[] iterationIndexes;
-  private byte[][] elementsToSearch;
-
-  @Setup(Level.Trial)
-  public void createGenericIndexed() throws IOException
-  {
-    GenericIndexedWriter<byte[]> genericIndexedWriter = new GenericIndexedWriter<>(
-        new OffHeapMemorySegmentWriteOutMedium(),
-        "genericIndexedBenchmark",
-        byteArrayStrategy
-    );
-    genericIndexedWriter.open();
-
-    // GenericIndexObject caches prevObject for comparison, so need two arrays for correct objectsSorted computation.
-    ByteBuffer[] elements = new ByteBuffer[2];
-    elements[0] = ByteBuffer.allocate(elementSize);
-    elements[1] = ByteBuffer.allocate(elementSize);
-    for (int i = 0; i < n; i++) {
-      ByteBuffer element = elements[i & 1];
-      element.putInt(0, i);
-      genericIndexedWriter.write(element.array());
-    }
-    smooshDir = Files.createTempDir();
-    file = File.createTempFile("genericIndexedBenchmark", "meta");
-
-    try (FileChannel fileChannel =
-             FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE);
-         FileSmoosher fileSmoosher = new FileSmoosher(smooshDir)) {
-      genericIndexedWriter.writeTo(fileChannel, fileSmoosher);
-    }
-
-    FileChannel fileChannel = FileChannel.open(file.toPath());
-    MappedByteBuffer byteBuffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, 0, file.length());
-    genericIndexed = GenericIndexed.read(byteBuffer, byteArrayStrategy, SmooshedFileMapper.load(smooshDir));
-  }
-
-  @Setup(Level.Trial)
-  public void createIterationIndexes()
-  {
-    iterationIndexes = new int[ITERATIONS];
-    for (int i = 0; i < ITERATIONS; i++) {
-      iterationIndexes[i] = ThreadLocalRandom.current().nextInt(n);
-    }
-  }
-
-  @Setup(Level.Trial)
-  public void createElementsToSearch()
-  {
-    elementsToSearch = new byte[ITERATIONS][];
-    for (int i = 0; i < ITERATIONS; i++) {
-      elementsToSearch[i] = Ints.toByteArray(ThreadLocalRandom.current().nextInt(n));
-    }
-  }
-
-  @Benchmark
-  public void get(Blackhole bh)
-  {
-    for (int i : iterationIndexes) {
-      bh.consume(genericIndexed.get(i));
-    }
-  }
-
-  @Benchmark
-  public int indexOf()
-  {
-    int r = 0;
-    for (byte[] elementToSearch : elementsToSearch) {
-      r ^= genericIndexed.indexOf(elementToSearch);
-    }
-    return r;
-  }
-}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java
deleted file mode 100644
index e8160672d7c..00000000000
--- a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java
+++ /dev/null
@@ -1,856 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.benchmark;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.smile.SmileFactory;
-import com.google.common.base.Supplier;
-import com.google.common.base.Suppliers;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.io.Files;
-import io.druid.benchmark.datagen.BenchmarkDataGenerator;
-import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
-import io.druid.benchmark.datagen.BenchmarkSchemas;
-import io.druid.benchmark.query.QueryBenchmarkUtil;
-import io.druid.collections.BlockingPool;
-import io.druid.collections.DefaultBlockingPool;
-import io.druid.collections.NonBlockingPool;
-import io.druid.collections.StupidPool;
-import io.druid.java.util.common.concurrent.Execs;
-import io.druid.data.input.InputRow;
-import io.druid.data.input.Row;
-import io.druid.hll.HyperLogLogHash;
-import io.druid.jackson.DefaultObjectMapper;
-import io.druid.java.util.common.granularity.Granularities;
-import io.druid.java.util.common.granularity.Granularity;
-import io.druid.java.util.common.guava.Sequence;
-import io.druid.java.util.common.guava.Sequences;
-import io.druid.java.util.common.logger.Logger;
-import io.druid.offheap.OffheapBufferGenerator;
-import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
-import io.druid.query.DruidProcessingConfig;
-import io.druid.query.FinalizeResultsQueryRunner;
-import io.druid.query.Query;
-import io.druid.query.QueryPlus;
-import io.druid.query.QueryRunner;
-import io.druid.query.QueryRunnerFactory;
-import io.druid.query.QueryToolChest;
-import io.druid.query.aggregation.AggregatorFactory;
-import io.druid.query.aggregation.LongSumAggregatorFactory;
-import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
-import io.druid.query.dimension.DefaultDimensionSpec;
-import io.druid.query.dimension.DimensionSpec;
-import io.druid.query.groupby.GroupByQuery;
-import io.druid.query.groupby.GroupByQueryConfig;
-import io.druid.query.groupby.GroupByQueryEngine;
-import io.druid.query.groupby.GroupByQueryQueryToolChest;
-import io.druid.query.groupby.GroupByQueryRunnerFactory;
-import io.druid.query.groupby.strategy.GroupByStrategySelector;
-import io.druid.query.groupby.strategy.GroupByStrategyV1;
-import io.druid.query.groupby.strategy.GroupByStrategyV2;
-import io.druid.query.spec.MultipleIntervalSegmentSpec;
-import io.druid.query.spec.QuerySegmentSpec;
-import io.druid.segment.IndexIO;
-import io.druid.segment.IndexMergerV9;
-import io.druid.segment.IndexSpec;
-import io.druid.segment.QueryableIndex;
-import io.druid.segment.QueryableIndexSegment;
-import io.druid.segment.column.ColumnConfig;
-import io.druid.segment.incremental.IncrementalIndex;
-import io.druid.segment.serde.ComplexMetrics;
-import org.apache.commons.io.FileUtils;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Level;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.TearDown;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.infra.Blackhole;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeUnit;
-
-// Benchmark for determining the interface overhead of GroupBy with multiple type implementations
-
-@State(Scope.Benchmark)
-@Fork(value = 1)
-@Warmup(iterations = 15)
-@Measurement(iterations = 30)
-public class GroupByTypeInterfaceBenchmark
-{
-  @Param({"4"})
-  private int numSegments;
-
-  @Param({"4"})
-  private int numProcessingThreads;
-
-  @Param({"-1"})
-  private int initialBuckets;
-
-  @Param({"100000"})
-  private int rowsPerSegment;
-
-  @Param({"v2"})
-  private String defaultStrategy;
-
-  @Param({"all"})
-  private String queryGranularity;
-
-  private static final Logger log = new Logger(GroupByTypeInterfaceBenchmark.class);
-  private static final int RNG_SEED = 9999;
-  private static final IndexMergerV9 INDEX_MERGER_V9;
-  private static final IndexIO INDEX_IO;
-  public static final ObjectMapper JSON_MAPPER;
-
-  private File tmpDir;
-  private IncrementalIndex anIncrementalIndex;
-  private List<QueryableIndex> queryableIndexes;
-
-  private QueryRunnerFactory<Row, GroupByQuery> factory;
-
-  private BenchmarkSchemaInfo schemaInfo;
-  private GroupByQuery stringQuery;
-  private GroupByQuery longFloatQuery;
-  private GroupByQuery floatQuery;
-  private GroupByQuery longQuery;
-
-  private ExecutorService executorService;
-
-  static {
-    JSON_MAPPER = new DefaultObjectMapper();
-    INDEX_IO = new IndexIO(
-        JSON_MAPPER,
-        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
-        new ColumnConfig()
-        {
-          @Override
-          public int columnCacheSizeBytes()
-          {
-            return 0;
-          }
-        }
-    );
-    INDEX_MERGER_V9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
-  }
-
-  private static final Map<String, Map<String, GroupByQuery>> SCHEMA_QUERY_MAP = new LinkedHashMap<>();
-
-  private void setupQueries()
-  {
-    // queries for the basic schema
-    Map<String, GroupByQuery> basicQueries = new LinkedHashMap<>();
-    BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");
-
-    { // basic.A
-      QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
-      List<AggregatorFactory> queryAggs = new ArrayList<>();
-      queryAggs.add(new LongSumAggregatorFactory(
-          "sumLongSequential",
-          "sumLongSequential"
-      ));
-      GroupByQuery queryString = GroupByQuery
-          .builder()
-          .setDataSource("blah")
-          .setQuerySegmentSpec(intervalSpec)
-          .setDimensions(Lists.<DimensionSpec>newArrayList(
-              new DefaultDimensionSpec("dimSequential", null)
-          ))
-          .setAggregatorSpecs(
-              queryAggs
-          )
-          .setGranularity(Granularity.fromString(queryGranularity))
-          .build();
-
-      GroupByQuery queryLongFloat = GroupByQuery
-          .builder()
-          .setDataSource("blah")
-          .setQuerySegmentSpec(intervalSpec)
-          .setDimensions(Lists.<DimensionSpec>newArrayList(
-              new DefaultDimensionSpec("metLongUniform", null),
-              new DefaultDimensionSpec("metFloatNormal", null)
-          ))
-          .setAggregatorSpecs(
-              queryAggs
-          )
-          .setGranularity(Granularity.fromString(queryGranularity))
-          .build();
-
-      GroupByQuery queryLong = GroupByQuery
-          .builder()
-          .setDataSource("blah")
-          .setQuerySegmentSpec(intervalSpec)
-          .setDimensions(Lists.<DimensionSpec>newArrayList(
-              new DefaultDimensionSpec("metLongUniform", null)
-          ))
-          .setAggregatorSpecs(
-              queryAggs
-          )
-          .setGranularity(Granularity.fromString(queryGranularity))
-          .build();
-
-      GroupByQuery queryFloat = GroupByQuery
-          .builder()
-          .setDataSource("blah")
-          .setQuerySegmentSpec(intervalSpec)
-          .setDimensions(Lists.<DimensionSpec>newArrayList(
-              new DefaultDimensionSpec("metFloatNormal", null)
-          ))
-          .setAggregatorSpecs(
-              queryAggs
-          )
-          .setGranularity(Granularity.fromString(queryGranularity))
-          .build();
-
-      basicQueries.put("string", queryString);
-      basicQueries.put("longFloat", queryLongFloat);
-      basicQueries.put("long", queryLong);
-      basicQueries.put("float", queryFloat);
-    }
-
-    { // basic.nested
-      QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
-      List<AggregatorFactory> queryAggs = new ArrayList<>();
-      queryAggs.add(new LongSumAggregatorFactory(
-          "sumLongSequential",
-          "sumLongSequential"
-      ));
-
-      GroupByQuery subqueryA = GroupByQuery
-          .builder()
-          .setDataSource("blah")
-          .setQuerySegmentSpec(intervalSpec)
-          .setDimensions(Lists.<DimensionSpec>newArrayList(
-              new DefaultDimensionSpec("dimSequential", null),
-              new DefaultDimensionSpec("dimZipf", null)
-          ))
-          .setAggregatorSpecs(
-              queryAggs
-          )
-          .setGranularity(Granularities.DAY)
-          .build();
-
-      GroupByQuery queryA = GroupByQuery
-          .builder()
-          .setDataSource(subqueryA)
-          .setQuerySegmentSpec(intervalSpec)
-          .setDimensions(Lists.<DimensionSpec>newArrayList(
-              new DefaultDimensionSpec("dimSequential", null)
-          ))
-          .setAggregatorSpecs(
-              queryAggs
-          )
-          .setGranularity(Granularities.WEEK)
-          .build();
-
-      basicQueries.put("nested", queryA);
-    }
-
-    SCHEMA_QUERY_MAP.put("basic", basicQueries);
-  }
-
-  @Setup(Level.Trial)
-  public void setup() throws IOException
-  {
-    log.info("SETUP CALLED AT %d", System.currentTimeMillis());
-
-    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
-      ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
-    }
-    executorService = Execs.multiThreaded(numProcessingThreads, "GroupByThreadPool[%d]");
-
-    setupQueries();
-
-    String schemaName = "basic";
-
-    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schemaName);
-    stringQuery = SCHEMA_QUERY_MAP.get(schemaName).get("string");
-    longFloatQuery = SCHEMA_QUERY_MAP.get(schemaName).get("longFloat");
-    longQuery = SCHEMA_QUERY_MAP.get(schemaName).get("long");
-    floatQuery = SCHEMA_QUERY_MAP.get(schemaName).get("float");
-
-    final BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(
-        schemaInfo.getColumnSchemas(),
-        RNG_SEED + 1,
-        schemaInfo.getDataInterval(),
-        rowsPerSegment
-    );
-
-    tmpDir = Files.createTempDir();
-    log.info("Using temp dir: %s", tmpDir.getAbsolutePath());
-
-    // queryableIndexes   -> numSegments worth of on-disk segments
-    // anIncrementalIndex -> the last incremental index
-    anIncrementalIndex = null;
-    queryableIndexes = new ArrayList<>(numSegments);
-
-    for (int i = 0; i < numSegments; i++) {
-      log.info("Generating rows for segment %d/%d", i + 1, numSegments);
-
-      final IncrementalIndex index = makeIncIndex();
-
-      for (int j = 0; j < rowsPerSegment; j++) {
-        final InputRow row = dataGenerator.nextRow();
-        if (j % 20000 == 0) {
-          log.info("%,d/%,d rows generated.", i * rowsPerSegment + j, rowsPerSegment * numSegments);
-        }
-        index.add(row);
-      }
-
-      log.info(
-          "%,d/%,d rows generated, persisting segment %d/%d.",
-          (i + 1) * rowsPerSegment,
-          rowsPerSegment * numSegments,
-          i + 1,
-          numSegments
-      );
-
-      final File file = INDEX_MERGER_V9.persist(
-          index,
-          new File(tmpDir, String.valueOf(i)),
-          new IndexSpec(),
-          null
-      );
-
-      queryableIndexes.add(INDEX_IO.loadIndex(file));
-
-      if (i == numSegments - 1) {
-        anIncrementalIndex = index;
-      } else {
-        index.close();
-      }
-    }
-
-    NonBlockingPool<ByteBuffer> bufferPool = new StupidPool<>(
-        "GroupByBenchmark-computeBufferPool",
-        new OffheapBufferGenerator("compute", 250_000_000),
-        0,
-        Integer.MAX_VALUE
-    );
-
-    // limit of 2 is required since we simulate both historical merge and broker merge in the same process
-    BlockingPool<ByteBuffer> mergePool = new DefaultBlockingPool<>(
-        new OffheapBufferGenerator("merge", 250_000_000),
-        2
-    );
-    final GroupByQueryConfig config = new GroupByQueryConfig()
-    {
-      @Override
-      public String getDefaultStrategy()
-      {
-        return defaultStrategy;
-      }
-
-      @Override
-      public int getBufferGrouperInitialBuckets()
-      {
-        return initialBuckets;
-      }
-
-      @Override
-      public long getMaxOnDiskStorage()
-      {
-        return 1_000_000_000L;
-      }
-    };
-    config.setSingleThreaded(false);
-    config.setMaxIntermediateRows(Integer.MAX_VALUE);
-    config.setMaxResults(Integer.MAX_VALUE);
-
-    DruidProcessingConfig druidProcessingConfig = new DruidProcessingConfig()
-    {
-      @Override
-      public int getNumThreads()
-      {
-        // Used by "v2" strategy for concurrencyHint
-        return numProcessingThreads;
-      }
-
-      @Override
-      public String getFormatString()
-      {
-        return null;
-      }
-    };
-
-    final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
-    final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
-        configSupplier,
-        new GroupByStrategyV1(
-            configSupplier,
-            new GroupByQueryEngine(configSupplier, bufferPool),
-            QueryBenchmarkUtil.NOOP_QUERYWATCHER,
-            bufferPool
-        ),
-        new GroupByStrategyV2(
-            druidProcessingConfig,
-            configSupplier,
-            bufferPool,
-            mergePool,
-            new ObjectMapper(new SmileFactory()),
-            QueryBenchmarkUtil.NOOP_QUERYWATCHER
-        )
-    );
-
-    factory = new GroupByQueryRunnerFactory(
-        strategySelector,
-        new GroupByQueryQueryToolChest(
-            strategySelector,
-            QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()
-        )
-    );
-  }
-
-  private IncrementalIndex makeIncIndex()
-  {
-    return new IncrementalIndex.Builder()
-        .setSimpleTestingIndexSchema(schemaInfo.getAggsArray())
-        .setReportParseExceptions(false)
-        .setConcurrentEventAdd(true)
-        .setMaxRowCount(rowsPerSegment)
-        .buildOnheap();
-  }
-
-  @TearDown(Level.Trial)
-  public void tearDown()
-  {
-    try {
-      if (anIncrementalIndex != null) {
-        anIncrementalIndex.close();
-      }
-
-      if (queryableIndexes != null) {
-        for (QueryableIndex index : queryableIndexes) {
-          index.close();
-        }
-      }
-
-      if (tmpDir != null) {
-        FileUtils.deleteDirectory(tmpDir);
-      }
-    }
-    catch (IOException e) {
-      log.warn(e, "Failed to tear down, temp dir was: %s", tmpDir);
-      throw Throwables.propagate(e);
-    }
-  }
-
-  private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
-  {
-    QueryToolChest toolChest = factory.getToolchest();
-    QueryRunner<T> theRunner = new FinalizeResultsQueryRunner<>(
-        toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)),
-        toolChest
-    );
-
-    Sequence<T> queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
-    return Sequences.toList(queryResult, Lists.<T>newArrayList());
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexStringOnly(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexLongOnly(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexFloatOnly(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, floatQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexNumericOnly(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longFloatQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexNumericThenString(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longFloatQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-
-    runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexLongThenString(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-
-    runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexLongThenFloat(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-
-    runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, floatQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexStringThenNumeric(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-
-    runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longFloatQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexStringThenLong(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-
-    runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longQuery);
-
-    for (Row result : results) {
-      blackhole.consume(result);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.AverageTime)
-  @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  public void querySingleQueryableIndexStringTwice(Blackhole blackhole) throws Exception
-  {
-    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
-        factory,
-        "qIndex",
-        new QueryableIndexSegment("qIndex", queryableIndexes.get(0))
-    );
-
-    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);
-
-    for (Row result : results) {

  (This diff was longer than 20,000 lines, and has been truncated...)


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@druid.apache.org
For additional commands, e-mail: commits-help@druid.apache.org