You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@druid.apache.org by GitBox <gi...@apache.org> on 2018/08/02 15:20:28 UTC

[GitHub] b-slim closed pull request #5958: Part 2 of changes for SQL Compatible Null Handling

b-slim closed pull request #5958: Part 2 of changes for SQL Compatible Null Handling
URL: https://github.com/apache/incubator-druid/pull/5958
 
 
   

This is a PR merged from a forked repository. Because GitHub hides the
original diff once a pull request from a fork is merged, the diff is
reproduced below for the sake of provenance:

diff --git a/.travis.yml b/.travis.yml
index 3918b9bdb69..3c9f57ea4e5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,6 +33,15 @@ matrix:
         - unset _JAVA_OPTIONS
       script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl processing
 
+      # processing module tests with SQL Compatibility enabled
+    - sudo: false
+      env:
+        - NAME="processing module test with SQL Compatibility"
+      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
+      before_script:
+        - unset _JAVA_OPTIONS
+      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -Ddruid.generic.useDefaultValueForNull=false -pl processing
+
       # server module test
     - sudo: false
       env:
@@ -43,6 +52,17 @@ matrix:
       # Server module test is run without the parallel-test option because it's memory sensitive and often fails with that option.
       script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -pl server
 
+      # server module test with SQL Compatibility enabled
+    - sudo: false
+      env:
+        - NAME="server module test with SQL Compatibility enabled"
+      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
+      before_script:
+        - unset _JAVA_OPTIONS
+      # Server module test is run without the parallel-test option because it's memory sensitive and often fails with that option.
+      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -pl server -Ddruid.generic.useDefaultValueForNull=false
+
+
       # other modules test
     - sudo: false
       env:
@@ -53,6 +73,16 @@ matrix:
         - unset _JAVA_OPTIONS
       script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl '!processing,!server'
 
+      # other modules test with SQL Compatibility enabled
+    - sudo: false
+      env:
+        - NAME="other modules test with SQL Compatibility"
+        - AWS_REGION=us-east-1 # set a aws region for unit tests
+      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
+      before_script:
+        - unset _JAVA_OPTIONS
+      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -Ddruid.generic.useDefaultValueForNull=false -pl '!processing,!server'
+
       # run integration tests
     - sudo: required
       services:
diff --git a/api/src/main/java/io/druid/data/input/Rows.java b/api/src/main/java/io/druid/data/input/Rows.java
index 2d52b5b355a..b511e7dfcab 100644
--- a/api/src/main/java/io/druid/data/input/Rows.java
+++ b/api/src/main/java/io/druid/data/input/Rows.java
@@ -23,9 +23,11 @@
 import com.google.common.collect.ImmutableSortedSet;
 import com.google.common.collect.Maps;
 import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.parsers.ParseException;
 
+import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -92,10 +94,11 @@
    * @throws NullPointerException if the string is null
    * @throws ParseException       if the column cannot be converted to a number
    */
+  @Nullable
   public static Number objectToNumber(final String name, final Object inputValue)
   {
     if (inputValue == null) {
-      return Rows.LONG_ZERO;
+      return NullHandling.defaultLongValue();
     }
 
     if (inputValue instanceof Number) {
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
index bcf0f3627a9..29499d3b4db 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
@@ -21,12 +21,12 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.io.Files;
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.hll.HyperLogLogHash;
 import io.druid.jackson.DefaultObjectMapper;
@@ -557,7 +557,7 @@ public Filter toFilter()
       if (extractionFn == null) {
         return new NoBitmapSelectorFilter(dimension, value);
       } else {
-        final String valueOrNull = Strings.emptyToNull(value);
+        final String valueOrNull = NullHandling.emptyToNullIfNeeded(value);
 
         final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
         {
diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml
index 1a41308439b..8ceab68a2a7 100644
--- a/codestyle/checkstyle.xml
+++ b/codestyle/checkstyle.xml
@@ -171,7 +171,6 @@
       <property name="illegalPattern" value="true"/>
       <property name="message" value="Use java.lang.Primitive.BYTES instead."/>
     </module>
-
     <!-- This regex should be replaced with an IntelliJ inspection when teamcity.jetbrains.com updates to at least IntelliJ 2018.1 (currently it uses 2017.2) -->
     <module name="Regexp">
       <property name="format" value='[a-z][a-zA-Z0-9_]*\.equals\((\"|[A-Z_]+\))'/>
@@ -184,5 +183,15 @@
       <property name="illegalPattern" value="true"/>
       <property name="message" value='Use toArray(new Object[0]) instead'/>
     </module>
+    <module name="Regexp">
+      <property name="format" value="Strings.emptyToNull"/>
+      <property name="illegalPattern" value="true"/>
+      <property name="message" value="Use io.druid.common.config.NullHandling.emptyToNullIfNeeded instead"/>
+    </module>
+    <module name="Regexp">
+      <property name="format" value="Strings.nullToEmpty"/>
+      <property name="illegalPattern" value="true"/>
+      <property name="message" value="Use io.druid.common.config.NullHandling.nullToEmptyIfNeeded instead"/>
+    </module>
   </module>
 </module>
diff --git a/common/src/main/java/io/druid/math/expr/Evals.java b/common/src/main/java/io/druid/math/expr/Evals.java
index cc218a0f35c..76669fdf24c 100644
--- a/common/src/main/java/io/druid/math/expr/Evals.java
+++ b/common/src/main/java/io/druid/math/expr/Evals.java
@@ -19,7 +19,7 @@
 
 package io.druid.math.expr;
 
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.logger.Logger;
 
 import java.util.Arrays;
@@ -83,6 +83,6 @@ public static boolean asBoolean(double x)
 
   public static boolean asBoolean(String x)
   {
-    return !Strings.isNullOrEmpty(x) && Boolean.valueOf(x);
+    return !NullHandling.isNullOrEquivalent(x) && Boolean.valueOf(x);
   }
 }
diff --git a/common/src/main/java/io/druid/math/expr/Expr.java b/common/src/main/java/io/druid/math/expr/Expr.java
index c73d43d67c3..ddb6699b5fb 100644
--- a/common/src/main/java/io/druid/math/expr/Expr.java
+++ b/common/src/main/java/io/druid/math/expr/Expr.java
@@ -272,6 +272,9 @@ public void visit(Visitor visitor)
   public ExprEval eval(ObjectBinding bindings)
   {
     ExprEval ret = expr.eval(bindings);
+    if (NullHandling.sqlCompatible() && (ret.value() == null)) {
+      return ExprEval.of(null);
+    }
     if (ret.type() == ExprType.LONG) {
       return ExprEval.of(-ret.asLong());
     }
@@ -307,6 +310,9 @@ public String toString()
   public ExprEval eval(ObjectBinding bindings)
   {
     ExprEval ret = expr.eval(bindings);
+    if (NullHandling.sqlCompatible() && (ret.value() == null)) {
+      return ExprEval.of(null);
+    }
     // conforming to other boolean-returning binary operators
     ExprType retType = ret.type() == ExprType.DOUBLE ? ExprType.DOUBLE : ExprType.LONG;
     return ExprEval.of(!ret.asBoolean(), retType);
@@ -365,15 +371,21 @@ public ExprEval eval(ObjectBinding bindings)
 
     // Result of any Binary expressions is null if any of the argument is null.
     // e.g "select null * 2 as c;" or "select null + 1 as c;" will return null as per Standard SQL spec.
-    if (NullHandling.sqlCompatible() && (leftVal.isNull() || rightVal.isNull())) {
+    if (NullHandling.sqlCompatible() && (leftVal.value() == null || rightVal.value() == null)) {
       return ExprEval.of(null);
     }
 
     if (leftVal.type() == ExprType.STRING && rightVal.type() == ExprType.STRING) {
       return evalString(leftVal.asString(), rightVal.asString());
     } else if (leftVal.type() == ExprType.LONG && rightVal.type() == ExprType.LONG) {
+      if (NullHandling.sqlCompatible() && (leftVal.isNumericNull() || rightVal.isNumericNull())) {
+        return ExprEval.of(null);
+      }
       return ExprEval.of(evalLong(leftVal.asLong(), rightVal.asLong()));
     } else {
+      if (NullHandling.sqlCompatible() && (leftVal.isNumericNull() || rightVal.isNumericNull())) {
+        return ExprEval.of(null);
+      }
       return ExprEval.of(evalDouble(leftVal.asDouble(), rightVal.asDouble()));
     }
   }
diff --git a/common/src/main/java/io/druid/math/expr/ExprEval.java b/common/src/main/java/io/druid/math/expr/ExprEval.java
index b87e29874c9..20c74d941f6 100644
--- a/common/src/main/java/io/druid/math/expr/ExprEval.java
+++ b/common/src/main/java/io/druid/math/expr/ExprEval.java
@@ -19,9 +19,7 @@
 
 package io.druid.math.expr;
 
-import com.google.common.base.Preconditions;
 import com.google.common.primitives.Doubles;
-import com.google.common.primitives.Ints;
 import io.druid.common.config.NullHandling;
 import io.druid.common.guava.GuavaUtils;
 import io.druid.java.util.common.IAE;
@@ -32,7 +30,7 @@
  */
 public abstract class ExprEval<T>
 {
-  public static ExprEval ofLong(Number longValue)
+  public static ExprEval ofLong(@Nullable Number longValue)
   {
     return new LongExprEval(longValue);
   }
@@ -42,7 +40,7 @@ public static ExprEval of(long longValue)
     return new LongExprEval(longValue);
   }
 
-  public static ExprEval ofDouble(Number doubleValue)
+  public static ExprEval ofDouble(@Nullable Number doubleValue)
   {
     return new DoubleExprEval(doubleValue);
   }
@@ -71,7 +69,7 @@ public static ExprEval of(boolean value, ExprType type)
     }
   }
 
-  public static ExprEval bestEffortOf(Object val)
+  public static ExprEval bestEffortOf(@Nullable Object val)
   {
     if (val instanceof ExprEval) {
       return (ExprEval) val;
@@ -85,6 +83,7 @@ public static ExprEval bestEffortOf(Object val)
     return new StringExprEval(val == null ? null : String.valueOf(val));
   }
 
+  @Nullable
   final T value;
 
   private ExprEval(T value)
@@ -99,10 +98,10 @@ public Object value()
     return value;
   }
 
-  public boolean isNull()
-  {
-    return value == null;
-  }
+  /**
+   * returns true if numeric primitive value for this ExprEval is null, otherwise false.
+   */
+  public abstract boolean isNumericNull();
 
   public abstract int asInt();
 
@@ -125,7 +124,7 @@ public String asString()
   private abstract static class NumericExprEval extends ExprEval<Number>
   {
 
-    private NumericExprEval(Number value)
+    private NumericExprEval(@Nullable Number value)
     {
       super(value);
     }
@@ -147,13 +146,19 @@ public final double asDouble()
     {
       return value.doubleValue();
     }
+
+    @Override
+    public boolean isNumericNull()
+    {
+      return value == null;
+    }
   }
 
   private static class DoubleExprEval extends NumericExprEval
   {
-    private DoubleExprEval(Number value)
+    private DoubleExprEval(@Nullable Number value)
     {
-      super(Preconditions.checkNotNull(value, "value"));
+      super(value == null ? NullHandling.defaultDoubleValue() : value);
     }
 
     @Override
@@ -175,7 +180,7 @@ public final ExprEval castTo(ExprType castTo)
         case DOUBLE:
           return this;
         case LONG:
-          return ExprEval.of(asLong());
+          return ExprEval.of(value == null ? null : asLong());
         case STRING:
           return ExprEval.of(asString());
       }
@@ -191,9 +196,9 @@ public Expr toExpr()
 
   private static class LongExprEval extends NumericExprEval
   {
-    private LongExprEval(Number value)
+    private LongExprEval(@Nullable Number value)
     {
-      super(Preconditions.checkNotNull(value, "value"));
+      super(value == null ? NullHandling.defaultLongValue() : value);
     }
 
     @Override
@@ -213,7 +218,7 @@ public final ExprEval castTo(ExprType castTo)
     {
       switch (castTo) {
         case DOUBLE:
-          return ExprEval.of(asDouble());
+          return ExprEval.of(value == null ? null : asDouble());
         case LONG:
           return this;
         case STRING:
@@ -231,6 +236,8 @@ public Expr toExpr()
 
   private static class StringExprEval extends ExprEval<String>
   {
+    private Number numericVal;
+
     private StringExprEval(@Nullable String value)
     {
       super(NullHandling.emptyToNullIfNeeded(value));
@@ -245,36 +252,63 @@ public final ExprType type()
     @Override
     public final int asInt()
     {
-      if (value == null) {
+      Number number = asNumber();
+      if (number == null) {
         assert NullHandling.replaceWithDefault();
         return 0;
       }
-
-      final Integer theInt = Ints.tryParse(value);
-      assert NullHandling.replaceWithDefault() || theInt != null;
-      return theInt == null ? 0 : theInt;
+      return number.intValue();
     }
 
     @Override
     public final long asLong()
     {
-      // GuavaUtils.tryParseLong handles nulls, no need for special null handling here.
-      final Long theLong = GuavaUtils.tryParseLong(value);
-      assert NullHandling.replaceWithDefault() || theLong != null;
-      return theLong == null ? 0L : theLong;
+      Number number = asNumber();
+      if (number == null) {
+        assert NullHandling.replaceWithDefault();
+        return 0L;
+      }
+      return number.longValue();
     }
 
     @Override
     public final double asDouble()
     {
-      if (value == null) {
+      Number number = asNumber();
+      if (number == null) {
         assert NullHandling.replaceWithDefault();
-        return 0.0;
+        return 0.0d;
+      }
+      return number.doubleValue();
+    }
+
+    @Nullable
+    private Number asNumber()
+    {
+      if (value == null) {
+        return null;
       }
+      if (numericVal != null) {
+        // Optimization for non-null case.
+        return numericVal;
+      }
+      Number rv;
+      Long v = GuavaUtils.tryParseLong(value);
+      // Do NOT use ternary operator here, because it makes Java to convert Long to Double
+      if (v != null) {
+        rv = v;
+      } else {
+        rv = Doubles.tryParse(value);
+      }
+
+      numericVal = rv;
+      return rv;
+    }
 
-      final Double theDouble = Doubles.tryParse(value);
-      assert NullHandling.replaceWithDefault() || theDouble != null;
-      return theDouble == null ? 0.0 : theDouble;
+    @Override
+    public boolean isNumericNull()
+    {
+      return asNumber() == null;
     }
 
     @Override
@@ -288,9 +322,9 @@ public final ExprEval castTo(ExprType castTo)
     {
       switch (castTo) {
         case DOUBLE:
-          return ExprEval.of(asDouble());
+          return ExprEval.ofDouble(asNumber());
         case LONG:
-          return ExprEval.of(asLong());
+          return ExprEval.ofLong(asNumber());
         case STRING:
           return this;
       }
diff --git a/common/src/main/java/io/druid/math/expr/Function.java b/common/src/main/java/io/druid/math/expr/Function.java
index e7cc34f9a2b..a98b17ce2ff 100644
--- a/common/src/main/java/io/druid/math/expr/Function.java
+++ b/common/src/main/java/io/druid/math/expr/Function.java
@@ -74,7 +74,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
     @Override
     protected final ExprEval eval(ExprEval param)
     {
-      if (NullHandling.sqlCompatible() && param.isNull()) {
+      if (NullHandling.sqlCompatible() && param.isNumericNull()) {
         return ExprEval.of(null);
       }
       if (param.type() == ExprType.LONG) {
@@ -796,6 +796,9 @@ public String name()
     @Override
     protected ExprEval eval(ExprEval x, ExprEval y)
     {
+      if (NullHandling.sqlCompatible() && x.value() == null) {
+        return ExprEval.of(null);
+      }
       ExprType castTo;
       try {
         castTo = ExprType.valueOf(StringUtils.toUpperCase(y.asString()));
@@ -880,7 +883,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
         throw new IAE("Function[%s] needs 2 arguments", name());
       }
       final ExprEval eval = args.get(0).eval(bindings);
-      return eval.isNull() ? args.get(1).eval(bindings) : eval;
+      return eval.value() == null ? args.get(1).eval(bindings) : eval;
     }
   }
 
@@ -937,7 +940,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
       }
 
       final String arg = args.get(0).eval(bindings).asString();
-      return arg == null ? ExprEval.of(0) : ExprEval.of(arg.length());
+      return arg == null ? ExprEval.ofLong(NullHandling.defaultLongValue()) : ExprEval.of(arg.length());
     }
   }
 
@@ -1094,7 +1097,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
       }
 
       final ExprEval expr = args.get(0).eval(bindings);
-      return ExprEval.of(expr.isNull(), ExprType.LONG);
+      return ExprEval.of(expr.value() == null, ExprType.LONG);
     }
   }
 
@@ -1114,7 +1117,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
       }
 
       final ExprEval expr = args.get(0).eval(bindings);
-      return ExprEval.of(!expr.isNull(), ExprType.LONG);
+      return ExprEval.of(expr.value() != null, ExprType.LONG);
     }
   }
 }
diff --git a/common/src/test/java/io/druid/math/expr/EvalTest.java b/common/src/test/java/io/druid/math/expr/EvalTest.java
index 6f334274305..4f9b6d9e956 100644
--- a/common/src/test/java/io/druid/math/expr/EvalTest.java
+++ b/common/src/test/java/io/druid/math/expr/EvalTest.java
@@ -140,11 +140,10 @@ public void testLongEval()
     Assert.assertEquals(1271055781L, evalLong("unix_timestamp('2010-04-12T07:03:01')", bindings));
     Assert.assertEquals(1271023381L, evalLong("unix_timestamp('2010-04-12T07:03:01+09:00')", bindings));
     Assert.assertEquals(1271023381L, evalLong("unix_timestamp('2010-04-12T07:03:01.419+09:00')", bindings));
-    if (NullHandling.replaceWithDefault()) {
-      Assert.assertEquals("NULL", eval("nvl(if(x == 9223372036854775807, '', 'x'), 'NULL')", bindings).asString());
-    } else {
-      Assert.assertEquals("", eval("nvl(if(x == 9223372036854775807, '', 'x'), 'NULL')", bindings).asString());
-    }
+    Assert.assertEquals(
+        NullHandling.replaceWithDefault() ? "NULL" : "",
+        eval("nvl(if(x == 9223372036854775807, '', 'x'), 'NULL')", bindings).asString()
+    );
     Assert.assertEquals("x", eval("nvl(if(x == 9223372036854775806, '', 'x'), 'NULL')", bindings).asString());
   }
 
diff --git a/common/src/test/java/io/druid/math/expr/FunctionTest.java b/common/src/test/java/io/druid/math/expr/FunctionTest.java
index 27713832399..54540fc4f7b 100644
--- a/common/src/test/java/io/druid/math/expr/FunctionTest.java
+++ b/common/src/test/java/io/druid/math/expr/FunctionTest.java
@@ -88,7 +88,7 @@ public void testSubstring()
   public void testStrlen()
   {
     assertExpr("strlen(x)", 3L);
-    assertExpr("strlen(nonexistent)", 0L);
+    assertExpr("strlen(nonexistent)", NullHandling.defaultLongValue());
   }
 
   @Test
diff --git a/docs/content/configuration/auth.md b/docs/content/configuration/auth.md
index 9751ba6ddbd..e18703c3b54 100644
--- a/docs/content/configuration/auth.md
+++ b/docs/content/configuration/auth.md
@@ -12,7 +12,7 @@ layout: doc_page
 |`druid.auth.unsecuredPaths`| List of Strings|List of paths for which security checks will not be performed. All requests to these paths will be allowed.|[]|no|
 |`druid.auth.allowUnauthenticatedHttpOptions`|Boolean|If true, skip authentication checks for HTTP OPTIONS requests. This is needed for certain use cases, such as supporting CORS pre-flight requests. Note that disabling authentication checks for OPTIONS requests will allow unauthenticated users to determine what Druid endpoints are valid (by checking if the OPTIONS request returns a 200 instead of 404), so enabling this option may reveal information about server configuration, including information about what extensions are loaded (if those extensions add endpoints).|false|no|
 
-## Enabling Authentication/Authorization
+## Enabling Authentication/AuthorizationLoadingLookupTest
 
 ## Authenticator Chain
 Authentication decisions are handled by a chain of Authenticator instances. A request will be checked by Authenticators in the sequence defined by the `druid.auth.authenticatorChain`.
diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorFactory.java
index e550720aa4d..e214a448bdf 100644
--- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorFactory.java
+++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorFactory.java
@@ -106,13 +106,13 @@ public BufferAggregator factorizeBuffered(final ColumnSelectorFactory metricFact
       if (selector instanceof NilColumnValueSelector) {
         return new DoublesSketchNoOpBufferAggregator();
       }
-      return new DoublesSketchBuildBufferAggregator(selector, k, getMaxIntermediateSize());
+      return new DoublesSketchBuildBufferAggregator(selector, k, getMaxIntermediateSizeWithNulls());
     }
     final ColumnValueSelector<DoublesSketch> selector = metricFactory.makeColumnValueSelector(fieldName);
     if (selector instanceof NilColumnValueSelector) {
       return new DoublesSketchNoOpBufferAggregator();
     }
-    return new DoublesSketchMergeBufferAggregator(selector, k, getMaxIntermediateSize());
+    return new DoublesSketchMergeBufferAggregator(selector, k, getMaxIntermediateSizeWithNulls());
   }
 
   @Override
diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/quantiles/DoublesSketchMergeAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/quantiles/DoublesSketchMergeAggregatorFactory.java
index b9cccb5d9c0..2ecc9907d58 100644
--- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/quantiles/DoublesSketchMergeAggregatorFactory.java
+++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/quantiles/DoublesSketchMergeAggregatorFactory.java
@@ -57,7 +57,7 @@ public BufferAggregator factorizeBuffered(final ColumnSelectorFactory metricFact
     if (selector instanceof NilColumnValueSelector) {
       return new DoublesSketchNoOpBufferAggregator();
     }
-    return new DoublesSketchMergeBufferAggregator(selector, getK(), getMaxIntermediateSize());
+    return new DoublesSketchMergeBufferAggregator(selector, getK(), getMaxIntermediateSizeWithNulls());
   }
 
 }
diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java
index 9fa7e3cb86a..2e6bc52e99a 100644
--- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java
+++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java
@@ -74,7 +74,7 @@ public Aggregator factorize(ColumnSelectorFactory metricFactory)
   public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
   {
     BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(fieldName);
-    return new SketchBufferAggregator(selector, size, getMaxIntermediateSize());
+    return new SketchBufferAggregator(selector, size, getMaxIntermediateSizeWithNulls());
   }
 
   @Override
diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregatorFactory.java
index 2610084d7b8..58f5b2eb9b4 100644
--- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregatorFactory.java
+++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregatorFactory.java
@@ -125,7 +125,7 @@ public BufferAggregator factorizeBuffered(final ColumnSelectorFactory metricFact
           selector,
           nominalEntries,
           numberOfValues,
-          getMaxIntermediateSize()
+          getMaxIntermediateSizeWithNulls()
       );
     }
     // input is raw data (key and array of values), use build aggregator
@@ -143,7 +143,7 @@ public BufferAggregator factorizeBuffered(final ColumnSelectorFactory metricFact
         keySelector,
         valueSelectors,
         nominalEntries,
-        getMaxIntermediateSize()
+        getMaxIntermediateSizeWithNulls()
     );
   }
 
diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java
index 7f4e724f7f8..ccd18cc9713 100644
--- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java
+++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java
@@ -20,6 +20,7 @@
 package io.druid.query.aggregation.histogram;
 
 import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.query.aggregation.Aggregator;
 import io.druid.segment.BaseFloatColumnValueSelector;
 
@@ -59,7 +60,12 @@ public ApproximateHistogramAggregator(
   @Override
   public void aggregate()
   {
-    histogram.offer(selector.getFloat());
+    // In case of ExpressionColumnValueSelector isNull will compute the expression and then give the result,
+    // the check for is NullHandling.replaceWithDefault is there to not have any performance impact of calling
+    // isNull for default case.
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      histogram.offer(selector.getFloat());
+    }
   }
 
   @Override
diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
index 6f2b58ba7a0..261715dcc07 100644
--- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
+++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
@@ -20,6 +20,7 @@
 package io.druid.query.aggregation.histogram;
 
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.guava.Sequence;
@@ -79,10 +80,14 @@ public void testIngestWithNullsIgnoredAndQuery() throws Exception
   @Test
   public void testIngestWithNullsToZeroAndQuery() throws Exception
   {
-    MapBasedRow row = ingestAndQuery(false);
-    Assert.assertEquals(0.0, row.getMetric("index_min").floatValue(), 0.0001);
-    Assert.assertEquals(135.109191, row.getMetric("index_max").floatValue(), 0.0001);
-    Assert.assertEquals(131.428176, row.getMetric("index_quantile").floatValue(), 0.0001);
+    // Nulls are ignored and not replaced with default for SQL compatible null handling.
+    // This is already tested in testIngestWithNullsIgnoredAndQuery()
+    if (NullHandling.replaceWithDefault()) {
+      MapBasedRow row = ingestAndQuery(false);
+      Assert.assertEquals(0.0F, row.getMetric("index_min"));
+      Assert.assertEquals(135.109191, row.getMetric("index_max").floatValue(), 0.0001);
+      Assert.assertEquals(131.428176, row.getMetric("index_quantile").floatValue(), 0.0001);
+    }
   }
 
   private MapBasedRow ingestAndQuery(boolean ignoreNulls) throws Exception
diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorTest.java
index f99d3fdfdb9..c2569bf7bc2 100644
--- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorTest.java
+++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorTest.java
@@ -48,7 +48,7 @@ public void testBufferAggregate()
     );
     ApproximateHistogramBufferAggregator agg = new ApproximateHistogramBufferAggregator(selector, resolution);
 
-    ByteBuffer buf = ByteBuffer.allocate(factory.getMaxIntermediateSize());
+    ByteBuffer buf = ByteBuffer.allocate(factory.getMaxIntermediateSizeWithNulls());
     int position = 0;
 
     agg.init(buf, position);
diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
index a61bd49709d..e4d6faa97a8 100644
--- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
+++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.Druids;
 import io.druid.query.QueryDataSource;
@@ -312,9 +313,12 @@ public void testQuantileOnInnerQuery() throws Exception
 
       // Verify results
       final List<Object[]> results = plannerResult.run().toList();
-      final List<Object[]> expectedResults = ImmutableList.of(
-          new Object[]{7.0, 8.26386833190918}
-      );
+      final List<Object[]> expectedResults;
+      if (NullHandling.replaceWithDefault()) {
+        expectedResults = ImmutableList.of(new Object[]{7.0, 8.26386833190918});
+      } else {
+        expectedResults = ImmutableList.of(new Object[]{5.25, 6.59091854095459});
+      }
       Assert.assertEquals(expectedResults.size(), results.size());
       for (int i = 0; i < expectedResults.size(); i++) {
         Assert.assertArrayEquals(expectedResults.get(i), results.get(i));
diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
index f0abd4d37d7..c3553b2b952 100644
--- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
+++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
@@ -115,6 +115,7 @@
 import io.druid.query.timeseries.TimeseriesQueryQueryToolChest;
 import io.druid.query.timeseries.TimeseriesQueryRunnerFactory;
 import io.druid.query.timeseries.TimeseriesResultValue;
+import io.druid.segment.DimensionHandlerUtils;
 import io.druid.segment.IndexIO;
 import io.druid.segment.QueryableIndex;
 import io.druid.segment.TestHelper;
@@ -2244,7 +2245,7 @@ public long countEvents(final Task task)
     List<Result<TimeseriesResultValue>> results =
         task.getQueryRunner(query).run(wrap(query), ImmutableMap.of()).toList();
 
-    return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric("rows");
+    return results.isEmpty() ? 0L : DimensionHandlerUtils.nullToZero(results.get(0).getValue().getLongMetric("rows"));
   }
 
   private static byte[] JB(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1)
diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java
index 4e4358457c2..6dfaef68ce8 100644
--- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java
+++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java
@@ -35,6 +35,7 @@
 import com.google.common.collect.ImmutableMap;
 import io.druid.guice.annotations.Json;
 import io.druid.java.util.common.IAE;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.UOE;
 import io.druid.java.util.common.jackson.JacksonUtils;
 import io.druid.java.util.common.parsers.CSVParser;
@@ -396,8 +397,8 @@ public TSVFlatDataParser(
           "Must specify more than one column to have a key value pair"
       );
       final DelimitedParser delegate = new DelimitedParser(
-          Strings.emptyToNull(delimiter),
-          Strings.emptyToNull(listDelimiter),
+          StringUtils.emptyToNullNonDruidDataString(delimiter),
+          StringUtils.emptyToNullNonDruidDataString(listDelimiter),
           hasHeaderRow,
           skipHeaderRows
       );
diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
index ed3b695884c..8104e942eb3 100644
--- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
+++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
@@ -19,15 +19,15 @@
 
 package io.druid.server.lookup.namespace.cache;
 
-import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
-import io.druid.java.util.common.StringUtils;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.concurrent.Execs;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.io.Closer;
 import io.druid.java.util.common.lifecycle.Lifecycle;
 import io.druid.java.util.common.logger.Logger;
@@ -382,7 +382,7 @@ public void testMappingWithoutFilter()
         String key = e.getKey();
         String[] val = e.getValue();
         String field = val[0];
-        Assert.assertEquals("non-null check", Strings.emptyToNull(field), Strings.emptyToNull(map.get(key)));
+        Assert.assertEquals("non-null check", NullHandling.emptyToNullIfNeeded(field), NullHandling.emptyToNullIfNeeded(map.get(key)));
       }
       Assert.assertEquals("null check", null, map.get("baz"));
     }
@@ -412,9 +412,9 @@ public void testMappingWithFilter()
         String filterVal = val[1];
 
         if ("1".equals(filterVal)) {
-          Assert.assertEquals("non-null check", Strings.emptyToNull(field), Strings.emptyToNull(map.get(key)));
+          Assert.assertEquals("non-null check", NullHandling.emptyToNullIfNeeded(field), NullHandling.emptyToNullIfNeeded(map.get(key)));
         } else {
-          Assert.assertEquals("non-null check", null, Strings.emptyToNull(map.get(key)));
+          Assert.assertEquals("non-null check", null, NullHandling.emptyToNullIfNeeded(map.get(key)));
         }
       }
     }
diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java
index 5ef1a445dfc..08f229aea7a 100644
--- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java
+++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java
@@ -21,11 +21,12 @@
 
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.query.lookup.LookupExtractor;
 import io.druid.server.lookup.cache.loading.LoadingCache;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.Callable;
@@ -62,15 +63,19 @@ public LoadingLookup(
 
 
   @Override
-  public String apply(final String key)
+  public String apply(@Nullable final String key)
   {
-    if (key == null) {
+    String keyEquivalent = NullHandling.nullToEmptyIfNeeded(key);
+    if (keyEquivalent == null) {
+      // keyEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
       return null;
     }
+
     final String presentVal;
     try {
-      presentVal = loadingCache.get(key, new ApplyCallable(key));
-      return Strings.emptyToNull(presentVal);
+      presentVal = loadingCache.get(keyEquivalent, new ApplyCallable(keyEquivalent));
+      return NullHandling.emptyToNullIfNeeded(presentVal);
     }
     catch (ExecutionException e) {
       LOGGER.debug("value not found for key [%s]", key);
@@ -79,15 +84,18 @@ public String apply(final String key)
   }
 
   @Override
-  public List<String> unapply(final String value)
+  public List<String> unapply(@Nullable final String value)
   {
-    // null value maps to empty list
-    if (value == null) {
+    String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
+    if (valueEquivalent == null) {
+      // valueEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      // null value maps to empty list when SQL Compatible
       return Collections.EMPTY_LIST;
     }
     final List<String> retList;
     try {
-      retList = reverseLoadingCache.get(value, new UnapplyCallable(value));
+      retList = reverseLoadingCache.get(valueEquivalent, new UnapplyCallable(valueEquivalent));
       return retList;
     }
     catch (ExecutionException e) {
@@ -131,8 +139,9 @@ public ApplyCallable(String key)
     @Override
     public String call()
     {
+      // When SQL compatible null handling is disabled,
       // avoid returning null and return an empty string to cache it.
-      return Strings.nullToEmpty(dataFetcher.fetch(key));
+      return NullHandling.nullToEmptyIfNeeded(dataFetcher.fetch(key));
     }
   }
 
diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java
index 1250a24a2c7..4dd1a3d38db 100644
--- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java
+++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java
@@ -20,11 +20,11 @@
 package io.druid.server.lookup;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningScheduledExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.concurrent.Execs;
 import io.druid.java.util.common.ISE;
 import io.druid.java.util.common.logger.Logger;
@@ -33,7 +33,8 @@
 import io.druid.server.lookup.cache.polling.PollingCache;
 import io.druid.server.lookup.cache.polling.PollingCacheFactory;
 
-import javax.validation.constraints.NotNull;
+import javax.annotation.Nullable;
+import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -107,8 +108,15 @@ public void close()
   }
 
   @Override
-  public String apply(@NotNull String key)
+  @Nullable
+  public String apply(@Nullable String key)
   {
+    String keyEquivalent = NullHandling.nullToEmptyIfNeeded(key);
+    if (keyEquivalent == null) {
+      // keyEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      return null;
+    }
     final CacheRefKeeper cacheRefKeeper = refOfCacheKeeper.get();
     if (cacheRefKeeper == null) {
       throw new ISE("Cache reference is null WTF");
@@ -117,9 +125,9 @@ public String apply(@NotNull String key)
     try {
       if (cache == null) {
         // it must've been closed after swapping while I was getting it.  Try again.
-        return this.apply(key);
+        return this.apply(keyEquivalent);
       }
-      return Strings.emptyToNull((String) cache.get(key));
+      return NullHandling.emptyToNullIfNeeded((String) cache.get(keyEquivalent));
     }
     finally {
       if (cacheRefKeeper != null && cache != null) {
@@ -129,8 +137,16 @@ public String apply(@NotNull String key)
   }
 
   @Override
-  public List<String> unapply(final String value)
+  public List<String> unapply(@Nullable final String value)
   {
+    String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
+    if (valueEquivalent == null) {
+      // valueEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      // null value maps to empty list when SQL Compatible
+      return Collections.emptyList();
+    }
+
     CacheRefKeeper cacheRefKeeper = refOfCacheKeeper.get();
     if (cacheRefKeeper == null) {
       throw new ISE("pollingLookup id [%s] is closed", id);
@@ -139,9 +155,9 @@ public String apply(@NotNull String key)
     try {
       if (cache == null) {
         // it must've been closed after swapping while I was getting it.  Try again.
-        return this.unapply(value);
+        return this.unapply(valueEquivalent);
       }
-      return cache.getKeys(value);
+      return cache.getKeys(valueEquivalent);
     }
     finally {
       if (cacheRefKeeper != null && cache != null) {
diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java
index 53fd81795b9..5ec4aafa59c 100644
--- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java
+++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java
@@ -21,8 +21,9 @@
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
+
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.metadata.MetadataStorageConnectorConfig;
@@ -131,7 +132,7 @@ public String fetch(final String key)
     if (pairs.isEmpty()) {
       return null;
     }
-    return Strings.nullToEmpty(pairs.get(0));
+    return NullHandling.nullToEmptyIfNeeded(pairs.get(0));
   }
 
   @Override
diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java
index c8e8078ccf8..f98b03f13ef 100644
--- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java
+++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
+import io.druid.common.config.NullHandling;
 import io.druid.server.lookup.cache.loading.LoadingCache;
 import org.easymock.EasyMock;
 import org.junit.Assert;
@@ -39,16 +40,29 @@
   LoadingLookup loadingLookup = new LoadingLookup(dataFetcher, lookupCache, reverseLookupCache);
 
   @Test
-  public void testApplyEmptyOrNull()
+  public void testApplyEmptyOrNull() throws ExecutionException
   {
-    Assert.assertEquals(null, loadingLookup.apply(null));
-    Assert.assertEquals(null, loadingLookup.apply(""));
+    EasyMock.expect(lookupCache.get(EasyMock.eq(""), EasyMock.anyObject(Callable.class)))
+            .andReturn("empty").atLeastOnce();
+    EasyMock.replay(lookupCache);
+    Assert.assertEquals("empty", loadingLookup.apply(""));
+    if (!NullHandling.sqlCompatible()) {
+      // Nulls and empty strings should have same behavior
+      Assert.assertEquals("empty", loadingLookup.apply(null));
+    } else {
+      Assert.assertNull(loadingLookup.apply(null));
+    }
+    EasyMock.verify(lookupCache);
   }
 
   @Test
   public void testUnapplyNull()
   {
-    Assert.assertEquals(Collections.EMPTY_LIST, loadingLookup.unapply(null));
+    if (NullHandling.sqlCompatible()) {
+      Assert.assertEquals(Collections.emptyList(), loadingLookup.unapply(null));
+    } else {
+      Assert.assertNull(loadingLookup.unapply(null));
+    }
   }
 
   @Test
@@ -88,7 +102,7 @@ public void testApplyWithExecutionError() throws ExecutionException
             .andThrow(new ExecutionException(null))
             .once();
     EasyMock.replay(lookupCache);
-    Assert.assertEquals(null, loadingLookup.apply("key"));
+    Assert.assertNull(loadingLookup.apply("key"));
     EasyMock.verify(lookupCache);
   }
 
@@ -99,7 +113,7 @@ public void testUnApplyWithExecutionError() throws ExecutionException
             .andThrow(new ExecutionException(null))
             .once();
     EasyMock.replay(reverseLookupCache);
-    Assert.assertEquals(Collections.EMPTY_LIST, loadingLookup.unapply("value"));
+    Assert.assertEquals(Collections.emptyList(), loadingLookup.unapply("value"));
     EasyMock.verify(reverseLookupCache);
   }
 
diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java
index f299603963a..bbe696450e2 100644
--- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java
+++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java
@@ -21,10 +21,11 @@
 
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.Function;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.ISE;
 import io.druid.query.lookup.LookupExtractor;
 import io.druid.server.lookup.cache.polling.OffHeapPollingCache;
@@ -190,7 +191,7 @@ public void testBulkApply()
       public String apply(String input)
       {
         //make sure to rewrite null strings as empty.
-        return Strings.nullToEmpty(input);
+        return NullHandling.nullToEmptyIfNeeded(input);
       }
     }));
   }
@@ -207,7 +208,7 @@ private void assertMapLookup(Map<String, String> map, LookupExtractor lookup)
     for (Map.Entry<String, String> entry : map.entrySet()) {
       String key = entry.getKey();
       String val = entry.getValue();
-      Assert.assertEquals("non-null check", Strings.emptyToNull(val), lookup.apply(key));
+      Assert.assertEquals("non-null check", NullHandling.emptyToNullIfNeeded(val), lookup.apply(key));
     }
   }
 }
diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorTest.java
index 8a01fdd12b1..f496b16e80f 100644
--- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorTest.java
+++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorTest.java
@@ -111,7 +111,7 @@ public void testDoubleVarianceBufferAggregator()
         colSelectorFactory
     );
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[aggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[aggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     assertValues((VarianceAggregatorCollector) agg.get(buffer, 0), 0, 0d, 0d);
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java b/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java
index 9d28296a7af..f25c2a60d52 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java
@@ -26,6 +26,7 @@
 import com.google.common.io.ByteArrayDataOutput;
 import com.google.common.io.ByteStreams;
 
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.Rows;
@@ -45,6 +46,7 @@
 import io.druid.segment.serde.ComplexMetrics;
 import org.apache.hadoop.io.WritableUtils;
 
+import javax.annotation.Nullable;
 import java.io.DataInput;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -330,18 +332,22 @@ public static final SerializeResult toBytes(
           }
 
           String t = aggFactory.getTypeName();
-
-          if ("float".equals(t)) {
-            out.writeFloat(agg.getFloat());
-          } else if ("long".equals(t)) {
-            WritableUtils.writeVLong(out, agg.getLong());
-          } else if ("double".equals(t)) {
-            out.writeDouble(agg.getDouble());
+          if (agg.isNull()) {
+            out.writeByte(NullHandling.IS_NULL_BYTE);
           } else {
-            //its a complex metric
-            Object val = agg.get();
-            ComplexMetricSerde serde = getComplexMetricSerde(t);
-            writeBytes(serde.toBytes(val), out);
+            out.writeByte(NullHandling.IS_NOT_NULL_BYTE);
+            if ("float".equals(t)) {
+              out.writeFloat(agg.getFloat());
+            } else if ("long".equals(t)) {
+              WritableUtils.writeVLong(out, agg.getLong());
+            } else if ("double".equals(t)) {
+              out.writeDouble(agg.getDouble());
+            } else {
+              //its a complex metric
+              Object val = agg.get();
+              ComplexMetricSerde serde = getComplexMetricSerde(t);
+              writeBytes(serde.toBytes(val), out);
+            }
           }
         }
       }
@@ -353,10 +359,13 @@ public static final SerializeResult toBytes(
     }
   }
 
-  private static void writeBytes(byte[] value, ByteArrayDataOutput out) throws IOException
+  private static void writeBytes(@Nullable byte[] value, ByteArrayDataOutput out) throws IOException
   {
-    WritableUtils.writeVInt(out, value.length);
-    out.write(value, 0, value.length);
+    int length = value == null ? -1 : value.length;
+    WritableUtils.writeVInt(out, length);
+    if (value != null) {
+      out.write(value, 0, value.length);
+    }
   }
 
   private static void writeString(String value, ByteArrayDataOutput out) throws IOException
@@ -450,6 +459,11 @@ public static final InputRow fromBytes(
       for (int i = 0; i < metricSize; i++) {
         String metric = readString(in);
         String type = getType(metric, aggs, i);
+        byte metricNullability = in.readByte();
+        if (metricNullability == NullHandling.IS_NULL_BYTE) {
+          // metric value is null.
+          continue;
+        }
         if ("float".equals(type)) {
           event.put(metric, in.readFloat());
         } else if ("long".equals(type)) {
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
index 92448fa6e70..756385a1d26 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
@@ -295,8 +295,8 @@ public static void injectSystemProperties(Job job)
 
   public static void injectDruidProperties(Configuration configuration, List<String> listOfAllowedPrefix)
   {
-    String mapJavaOpts = Strings.nullToEmpty(configuration.get(MRJobConfig.MAP_JAVA_OPTS));
-    String reduceJavaOpts = Strings.nullToEmpty(configuration.get(MRJobConfig.REDUCE_JAVA_OPTS));
+    String mapJavaOpts = StringUtils.nullToEmptyNonDruidDataString(configuration.get(MRJobConfig.MAP_JAVA_OPTS));
+    String reduceJavaOpts = StringUtils.nullToEmptyNonDruidDataString(configuration.get(MRJobConfig.REDUCE_JAVA_OPTS));
 
     for (String propName : System.getProperties().stringPropertyNames()) {
       for (String prefix : listOfAllowedPrefix) {
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java
index f00d8872b28..17fe2e478af 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java
@@ -21,11 +21,11 @@
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
 import io.druid.indexer.HadoopDruidIndexerConfig;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.logger.Logger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -126,7 +126,9 @@ public static void addToMultipleInputs(
   private static void addInputPath(Job job, Iterable<String> pathStrings, Class<? extends InputFormat> inputFormatClass)
   {
     Configuration conf = job.getConfiguration();
-    StringBuilder inputFormats = new StringBuilder(Strings.nullToEmpty(conf.get(MultipleInputs.DIR_FORMATS)));
+    StringBuilder inputFormats = new StringBuilder(
+        StringUtils.nullToEmptyNonDruidDataString(conf.get(MultipleInputs.DIR_FORMATS))
+    );
 
     String[] paths = Iterables.toArray(pathStrings, String.class);
     for (int i = 0; i < paths.length - 1; i++) {
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java
index 3b508416b56..0f3753cd7ff 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionsSpec;
@@ -83,6 +84,7 @@ public void testSerde()
   {
     // Prepare the mocks & set close() call count expectation to 1
     final Aggregator mockedAggregator = EasyMock.createMock(DoubleSumAggregator.class);
+    EasyMock.expect(mockedAggregator.isNull()).andReturn(false).times(1);
     EasyMock.expect(mockedAggregator.getDouble()).andReturn(0d).times(1);
     mockedAggregator.aggregate();
     EasyMock.expectLastCall().times(1);
@@ -90,6 +92,26 @@ public void testSerde()
     EasyMock.expectLastCall().times(1);
     EasyMock.replay(mockedAggregator);
 
+    final Aggregator mockedNullAggregator = EasyMock.createMock(DoubleSumAggregator.class);
+    EasyMock.expect(mockedNullAggregator.isNull()).andReturn(true).times(1);
+    mockedNullAggregator.aggregate();
+    EasyMock.expectLastCall().times(1);
+    mockedNullAggregator.close();
+    EasyMock.expectLastCall().times(1);
+    EasyMock.replay(mockedNullAggregator);
+
+    final AggregatorFactory mockedAggregatorFactory = EasyMock.createMock(AggregatorFactory.class);
+    EasyMock.expect(mockedAggregatorFactory.factorize(EasyMock.anyObject(ColumnSelectorFactory.class))).andReturn(mockedAggregator);
+    EasyMock.expect(mockedAggregatorFactory.getTypeName()).andReturn("double").anyTimes();
+    EasyMock.expect(mockedAggregatorFactory.getName()).andReturn("mockedAggregator").anyTimes();
+
+    final AggregatorFactory mockedNullAggregatorFactory = EasyMock.createMock(AggregatorFactory.class);
+    EasyMock.expect(mockedNullAggregatorFactory.factorize(EasyMock.anyObject(ColumnSelectorFactory.class))).andReturn(mockedNullAggregator);
+    EasyMock.expect(mockedNullAggregatorFactory.getName()).andReturn("mockedNullAggregator").anyTimes();
+    EasyMock.expect(mockedNullAggregatorFactory.getTypeName()).andReturn("double").anyTimes();
+
+    EasyMock.replay(mockedAggregatorFactory, mockedNullAggregatorFactory);
+
     InputRow in = new MapBasedInputRow(
         timestamp,
         dims,
@@ -102,13 +124,8 @@ public void testSerde()
         new LongSumAggregatorFactory("m2out", "m2"),
         new HyperUniquesAggregatorFactory("m3out", "m3"),
         new LongSumAggregatorFactory("unparseable", "m3"), // Unparseable from String to Long
-        new DoubleSumAggregatorFactory("mockedAggregator", "m4") {
-          @Override
-          public Aggregator factorize(ColumnSelectorFactory metricFactory)
-          {
-            return mockedAggregator;
-          }
-        }
+        mockedAggregatorFactory,
+        mockedNullAggregatorFactory
     };
 
     DimensionsSpec dimensionsSpec = new DimensionsSpec(
@@ -136,13 +153,14 @@ public Aggregator factorize(ColumnSelectorFactory metricFactory)
     Assert.assertEquals(300.1f, out.getRaw("d4"));
     Assert.assertEquals(400.5d, out.getRaw("d5"));
 
-    Assert.assertEquals(0.0f, out.getMetric("agg_non_existing").floatValue(), 0.00001);
+    Assert.assertEquals(NullHandling.defaultDoubleValue(), out.getMetric("agg_non_existing"));
     Assert.assertEquals(5.0f, out.getMetric("m1out").floatValue(), 0.00001);
     Assert.assertEquals(100L, out.getMetric("m2out"));
     Assert.assertEquals(1, ((HyperLogLogCollector) out.getRaw("m3out")).estimateCardinality(), 0.001);
     Assert.assertEquals(0L, out.getMetric("unparseable"));
 
     EasyMock.verify(mockedAggregator);
+    EasyMock.verify(mockedNullAggregator);
   }
 
   @Test
diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java
index b0b8ca30983..6d3b31676aa 100644
--- a/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java
+++ b/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java
@@ -31,6 +31,7 @@
 import com.google.common.util.concurrent.MoreExecutors;
 import io.druid.client.cache.CacheConfig;
 import io.druid.client.cache.MapCache;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.FirehoseFactory;
 import io.druid.data.input.InputRow;
@@ -130,6 +131,7 @@
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.partition.LinearShardSpec;
 import io.druid.timeline.partition.NumberedShardSpec;
+import io.druid.utils.Runnables;
 import org.apache.commons.io.FileUtils;
 import org.easymock.EasyMock;
 import org.joda.time.DateTime;
@@ -142,6 +144,7 @@
 import org.junit.rules.ExpectedException;
 import org.junit.rules.TemporaryFolder;
 
+import javax.annotation.Nullable;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
@@ -223,7 +226,7 @@ public InputRow nextRow()
     @Override
     public Runnable commit()
     {
-      return () -> {};
+      return Runnables.getNoopRunnable();
     }
 
     @Override
@@ -331,7 +334,8 @@ public void testHandoffTimeout() throws Exception
 
     // handoff would timeout, resulting in exception
     TaskStatus status = statusFuture.get();
-    Assert.assertTrue(status.getErrorMsg().contains("java.util.concurrent.TimeoutException: Timeout waiting for task."));
+    Assert.assertTrue(status.getErrorMsg()
+                            .contains("java.util.concurrent.TimeoutException: Timeout waiting for task."));
   }
 
   @Test(timeout = 60_000L)
@@ -367,8 +371,8 @@ public void testBasics() throws Exception
     Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(2, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -429,8 +433,8 @@ public void testLateData() throws Exception
     Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(2, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -494,8 +498,8 @@ public void testMaxRowsPerSegment() throws Exception
     Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(2000, sumMetric(task, null, "rows"));
-    Assert.assertEquals(2000, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2000, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(2000, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -562,10 +566,14 @@ public void testTransformSpec() throws Exception
     Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(2, sumMetric(task, null, "rows"));
-    Assert.assertEquals(2, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows"));
-    Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(2, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows").longValue());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "metric1").longValue());
+    } else {
+      Assert.assertNull(sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "metric1"));
+    }
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -620,7 +628,8 @@ public void testReportParseExceptionsOnBadMetric() throws Exception
 
     // Wait for the task to finish.
     TaskStatus status = statusFuture.get();
-    Assert.assertTrue(status.getErrorMsg().contains("java.lang.RuntimeException: Max parse exceptions exceeded, terminating task..."));
+    Assert.assertTrue(status.getErrorMsg()
+                            .contains("java.lang.RuntimeException: Max parse exceptions exceeded, terminating task..."));
 
     IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
 
@@ -639,7 +648,15 @@ public void testNoReportParseExceptions() throws Exception
   {
     expectPublishedSegments(1);
 
-    final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, TransformSpec.NONE, false, 0, true, null, 1);
+    final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(
+        null,
+        TransformSpec.NONE,
+        false,
+        0,
+        true,
+        null,
+        1
+    );
     final ListenableFuture<TaskStatus> statusFuture = runTask(task);
 
     // Wait for firehose to show up, it starts off null.
@@ -683,8 +700,8 @@ public void testNoReportParseExceptions() throws Exception
     Assert.assertEquals(2, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(3, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(3, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -750,7 +767,18 @@ public void testMultipleParseExceptionsSuccess() throws Exception
             ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "foo"),
 
             // Bad long dim- will count as processed, but bad dims will get default values
-            ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "dimLong", "notnumber", "dimFloat", "notnumber", "met1", "foo"),
+            ImmutableMap.of(
+                "t",
+                1521251960729L,
+                "dim1",
+                "foo",
+                "dimLong",
+                "notnumber",
+                "dimFloat",
+                "notnumber",
+                "met1",
+                "foo"
+            ),
 
             // Bad row- will be unparseable.
             ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"),
@@ -775,8 +803,8 @@ public void testMultipleParseExceptionsSuccess() throws Exception
     Assert.assertEquals(2, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(4, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(4, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -852,7 +880,18 @@ public void testMultipleParseExceptionsFailure() throws Exception
             ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "foo"),
 
             // Bad long dim- will count as processed, but bad dims will get default values
-            ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "dimLong", "notnumber", "dimFloat", "notnumber", "met1", "foo"),
+            ImmutableMap.of(
+                "t",
+                1521251960729L,
+                "dim1",
+                "foo",
+                "dimLong",
+                "notnumber",
+                "dimFloat",
+                "notnumber",
+                "met1",
+                "foo"
+            ),
 
             // Bad row- will be unparseable.
             ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"),
@@ -943,7 +982,7 @@ public void testRestore() throws Exception
       }
 
       // Do a query, at this point the previous data should be loaded.
-      Assert.assertEquals(1, sumMetric(task2, null, "rows"));
+      Assert.assertEquals(1, sumMetric(task2, null, "rows").longValue());
 
       final TestFirehose firehose = (TestFirehose) task2.getFirehose();
 
@@ -961,7 +1000,7 @@ public void testRestore() throws Exception
       publishedSegment = Iterables.getOnlyElement(publishedSegments);
 
       // Do a query.
-      Assert.assertEquals(2, sumMetric(task2, null, "rows"));
+      Assert.assertEquals(2, sumMetric(task2, null, "rows").longValue());
 
       awaitHandoffs();
 
@@ -1018,7 +1057,7 @@ public void testRestoreAfterHandoffAttemptDuringShutdown() throws Exception
       publishedSegment = Iterables.getOnlyElement(publishedSegments);
 
       // Do a query.
-      Assert.assertEquals(1, sumMetric(task1, null, "rows"));
+      Assert.assertEquals(1, sumMetric(task1, null, "rows").longValue());
 
       // Trigger graceful shutdown.
       task1.stopGracefully();
@@ -1137,7 +1176,8 @@ public void testRestoreCorruptData() throws Exception
 
       IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
       Assert.assertEquals(expectedMetrics, reportData.getRowStats());
-      Assert.assertTrue(status.getErrorMsg().contains("java.lang.IllegalArgumentException\n\tat java.nio.Buffer.position"));
+      Assert.assertTrue(status.getErrorMsg()
+                              .contains("java.lang.IllegalArgumentException\n\tat java.nio.Buffer.position"));
     }
   }
 
@@ -1466,7 +1506,8 @@ public void close()
     );
   }
 
-  public long sumMetric(final Task task, final DimFilter filter, final String metric)
+  @Nullable
+  public Long sumMetric(final Task task, final DimFilter filter, final String metric)
   {
     // Do a query.
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
@@ -1482,7 +1523,12 @@ public long sumMetric(final Task task, final DimFilter filter, final String metr
 
     List<Result<TimeseriesResultValue>> results =
         task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
-    return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric(metric);
+
+    if (results.isEmpty()) {
+      return 0L;
+    } else {
+      return results.get(0).getValue().getLongMetric(metric);
+    }
   }
 
   private IngestionStatsAndErrorsTaskReportData getTaskReportData() throws IOException
diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
index 85836432e94..ac523e5039c 100644
--- a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
+++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
@@ -32,6 +32,7 @@
 import com.google.common.util.concurrent.MoreExecutors;
 import io.druid.client.cache.CacheConfig;
 import io.druid.client.cache.MapCache;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.FirehoseFactory;
 import io.druid.data.input.InputRow;
@@ -118,6 +119,7 @@
 import io.druid.server.coordination.DataSegmentServerAnnouncer;
 import io.druid.server.coordination.ServerType;
 import io.druid.timeline.DataSegment;
+import io.druid.utils.Runnables;
 import org.easymock.EasyMock;
 import org.hamcrest.CoreMatchers;
 import org.joda.time.DateTime;
@@ -131,6 +133,7 @@
 import org.junit.rules.ExpectedException;
 import org.junit.rules.TemporaryFolder;
 
+import javax.annotation.Nullable;
 import java.io.File;
 import java.nio.file.Files;
 import java.util.Arrays;
@@ -204,7 +207,7 @@ public InputRow nextRow()
     @Override
     public Runnable commit()
     {
-      return () -> {};
+      return Runnables.getNoopRunnable();
     }
 
     @Override
@@ -350,8 +353,8 @@ public void testBasics() throws Exception
     Assert.assertEquals(0, task.getMetrics().unparseable());
 
     // Do some queries.
-    Assert.assertEquals(2, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     // Simulate handoff.
     for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
@@ -419,10 +422,15 @@ public void testTransformSpec() throws Exception
     Assert.assertEquals(0, task.getMetrics().unparseable());
 
     // Do some queries.
-    Assert.assertEquals(1, sumMetric(task, null, "rows"));
-    Assert.assertEquals(1, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows"));
-    Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows"));
-    Assert.assertEquals(1, sumMetric(task, null, "met1"));
+    Assert.assertEquals(1, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(1, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows").longValue());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows").longValue());
+    } else {
+      Assert.assertNull(sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows"));
+
+    }
+    Assert.assertEquals(1, sumMetric(task, null, "met1").longValue());
 
     // Simulate handoff.
     for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
@@ -537,8 +545,8 @@ public void testNoReportParseExceptions() throws Exception
     Assert.assertEquals(2, task.getMetrics().unparseable());
 
     // Do some queries.
-    Assert.assertEquals(3, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(3, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     // Simulate handoff.
     for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
@@ -610,7 +618,7 @@ public void testRestore() throws Exception
       }
 
       // Do a query, at this point the previous data should be loaded.
-      Assert.assertEquals(1, sumMetric(task2, null, "rows"));
+      Assert.assertEquals(1, sumMetric(task2, null, "rows").longValue());
 
       final TestFirehose firehose = (TestFirehose) task2.getFirehose();
 
@@ -631,7 +639,7 @@ public void testRestore() throws Exception
       publishedSegment = Iterables.getOnlyElement(mdc.getPublished());
 
       // Do a query.
-      Assert.assertEquals(2, sumMetric(task2, null, "rows"));
+      Assert.assertEquals(2, sumMetric(task2, null, "rows").longValue());
 
       // Simulate handoff.
       for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
@@ -692,7 +700,7 @@ public void testRestoreAfterHandoffAttemptDuringShutdown() throws Exception
       publishedSegment = Iterables.getOnlyElement(mdc.getPublished());
 
       // Do a query.
-      Assert.assertEquals(1, sumMetric(task1, null, "rows"));
+      Assert.assertEquals(1, sumMetric(task1, null, "rows").longValue());
 
       // Trigger graceful shutdown.
       task1.stopGracefully();
@@ -1080,7 +1088,8 @@ public void close()
     return toolboxFactory.build(task);
   }
 
-  public long sumMetric(final Task task, final DimFilter filter, final String metric)
+  @Nullable
+  public Long sumMetric(final Task task, final DimFilter filter, final String metric)
   {
     // Do a query.
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
@@ -1096,6 +1105,10 @@ public long sumMetric(final Task task, final DimFilter filter, final String metr
 
     List<Result<TimeseriesResultValue>> results =
         task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
-    return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric(metric);
+    if (results.isEmpty()) {
+      return 0L;
+    } else {
+      return results.get(0).getValue().getLongMetric(metric);
+    }
   }
 }
diff --git a/java-util/pom.xml b/java-util/pom.xml
index d40b0adaeec..696e579a73d 100644
--- a/java-util/pom.xml
+++ b/java-util/pom.xml
@@ -53,6 +53,10 @@
             <groupId>org.skife.config</groupId>
             <artifactId>config-magic</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.google.inject</groupId>
+            <artifactId>guice</artifactId>
+        </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
diff --git a/common/src/main/java/io/druid/common/config/NullHandling.java b/java-util/src/main/java/io/druid/common/config/NullHandling.java
similarity index 95%
rename from common/src/main/java/io/druid/common/config/NullHandling.java
rename to java-util/src/main/java/io/druid/common/config/NullHandling.java
index 634c4e69266..018e300d36f 100644
--- a/common/src/main/java/io/druid/common/config/NullHandling.java
+++ b/java-util/src/main/java/io/druid/common/config/NullHandling.java
@@ -41,18 +41,22 @@
   public static final Double ZERO_DOUBLE = 0.0d;
   public static final Float ZERO_FLOAT = 0.0f;
   public static final Long ZERO_LONG = 0L;
+  public static final byte IS_NULL_BYTE = (byte) 1;
+  public static final byte IS_NOT_NULL_BYTE = (byte) 0;
 
   /**
    * INSTANCE is injected using static injection to avoid adding JacksonInject annotations all over the code.
    * See io.druid.guice.NullHandlingModule for details.
    * It does not take effect in all unit tests since we don't use Guice Injection.
-   * For tests default system property is supposed to be used only in tests
    */
   @Inject
   private static NullValueHandlingConfig INSTANCE = new NullValueHandlingConfig(
       Boolean.valueOf(System.getProperty(NULL_HANDLING_CONFIG_STRING, "true"))
   );
 
+  /**
+   * whether nulls should be replaced with default value.
+   */
   public static boolean replaceWithDefault()
   {
     return INSTANCE.isUseDefaultValuesForNull();
diff --git a/common/src/main/java/io/druid/common/config/NullValueHandlingConfig.java b/java-util/src/main/java/io/druid/common/config/NullValueHandlingConfig.java
similarity index 100%
rename from common/src/main/java/io/druid/common/config/NullValueHandlingConfig.java
rename to java-util/src/main/java/io/druid/common/config/NullValueHandlingConfig.java
diff --git a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java
index fe48597e6e5..7277aa7c726 100644
--- a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java
+++ b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java
@@ -19,6 +19,7 @@
 
 package io.druid.java.util.common;
 
+import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 
 import javax.annotation.Nullable;
@@ -172,4 +173,36 @@ private static String removeChar(String s, char c, int firstOccurranceIndex)
     }
     return sb.toString();
   }
+
+  /**
+   * Returns the given string if it is non-null; the empty string otherwise.
+   * This method should only be used at places where null to empty conversion is
+   * irrelevant to null handling of the data.
+   *
+   * @param string the string to test and possibly return
+   * @return {@code string} itself if it is non-null; {@code ""} if it is null
+   */
+  public static String nullToEmptyNonDruidDataString(@Nullable String string)
+  {
+    //CHECKSTYLE.OFF: Regexp
+    return Strings.nullToEmpty(string);
+    //CHECKSTYLE.ON: Regexp
+  }
+
+  /**
+   * Returns the given string if it is nonempty; {@code null} otherwise.
+   * This method should only be used at places where null to empty conversion is
+   * irrelevant to null handling of the data.
+   *
+   * @param string the string to test and possibly return
+   * @return {@code string} itself if it is nonempty; {@code null} if it is
+   *     empty or null
+   */
+  @Nullable
+  public static String emptyToNullNonDruidDataString(@Nullable String string)
+  {
+    //CHECKSTYLE.OFF: Regexp
+    return Strings.emptyToNull(string);
+    //CHECKSTYLE.ON: Regexp
+  }
 }
diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java b/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java
index 65aa41fa4e6..1284765045b 100644
--- a/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java
+++ b/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java
@@ -21,8 +21,8 @@
 
 import com.google.common.base.Function;
 import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import org.joda.time.DateTimeZone;
 
@@ -60,10 +60,10 @@
     return (input) -> {
       if (input != null && input.contains(listDelimiter)) {
         return StreamSupport.stream(listSplitter.split(input).spliterator(), false)
-                            .map(Strings::emptyToNull)
+                            .map(NullHandling::emptyToNullIfNeeded)
                             .collect(Collectors.toList());
       } else {
-        return Strings.emptyToNull(input);
+        return NullHandling.emptyToNullIfNeeded(input);
       }
     };
   }
diff --git a/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java b/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java
index 03a5e3e47c1..7058f3f11c3 100644
--- a/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java
+++ b/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java
@@ -20,7 +20,6 @@
 package io.druid.java.util.http.client;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Multimap;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
@@ -140,8 +139,7 @@ public void stop()
     } else {
       channel = channelFuture.getChannel();
     }
-
-    final String urlFile = Strings.nullToEmpty(url.getFile());
+    final String urlFile = StringUtils.nullToEmptyNonDruidDataString(url.getFile());
     final HttpRequest httpRequest = new DefaultHttpRequest(
         HttpVersion.HTTP_1_1,
         method,
diff --git a/processing/src/main/java/io/druid/guice/GuiceInjectors.java b/processing/src/main/java/io/druid/guice/GuiceInjectors.java
index 203bdc0d630..cc5670fedf6 100644
--- a/processing/src/main/java/io/druid/guice/GuiceInjectors.java
+++ b/processing/src/main/java/io/druid/guice/GuiceInjectors.java
@@ -41,6 +41,7 @@
         new JacksonModule(),
         new PropertiesModule(Arrays.asList("common.runtime.properties", "runtime.properties")),
         new ConfigModule(),
+        new NullHandlingModule(),
         binder -> {
           binder.bind(DruidSecondaryModule.class);
           JsonConfigProvider.bind(binder, "druid.extensions", ExtensionsConfig.class);
diff --git a/processing/src/main/java/io/druid/guice/NullHandlingModule.java b/processing/src/main/java/io/druid/guice/NullHandlingModule.java
new file mode 100644
index 00000000000..64f01291a34
--- /dev/null
+++ b/processing/src/main/java/io/druid/guice/NullHandlingModule.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.guice;
+
+import com.google.inject.Binder;
+import com.google.inject.Module;
+import io.druid.common.config.NullHandling;
+import io.druid.common.config.NullValueHandlingConfig;
+
+/**
+ * Binds {@link NullValueHandlingConfig} (from "druid.generic" properties) and statically injects it into {@link NullHandling}.
+ */
+public class NullHandlingModule implements Module
+{
+  @Override
+  public void configure(Binder binder)
+  {
+    JsonConfigProvider.bind(binder, "druid.generic", NullValueHandlingConfig.class);
+    binder.requestStaticInjection(NullHandling.class);
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/DefaultQueryMetrics.java b/processing/src/main/java/io/druid/query/DefaultQueryMetrics.java
index 1db510bb3d2..09900a57be9 100644
--- a/processing/src/main/java/io/druid/query/DefaultQueryMetrics.java
+++ b/processing/src/main/java/io/druid/query/DefaultQueryMetrics.java
@@ -21,11 +21,11 @@
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableMap;
+import io.druid.collections.bitmap.BitmapFactory;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.java.util.emitter.service.ServiceMetricEvent;
-import io.druid.collections.bitmap.BitmapFactory;
 import io.druid.query.filter.Filter;
 import org.joda.time.Interval;
 
@@ -118,7 +118,7 @@ public void duration(QueryType query)
   @Override
   public void queryId(QueryType query)
   {
-    setDimension(DruidMetrics.ID, Strings.nullToEmpty(query.getId()));
+    setDimension(DruidMetrics.ID, StringUtils.nullToEmptyNonDruidDataString(query.getId()));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java
index e112411b5b1..45bca12771b 100644
--- a/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java
+++ b/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java
@@ -19,6 +19,7 @@
 
 package io.druid.query.aggregation;
 
+import io.druid.guice.annotations.ExtensionPoint;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
 import io.druid.segment.ColumnValueSelector;
 
@@ -39,6 +40,7 @@
  * @see DoubleAggregateCombiner
  * @see ObjectAggregateCombiner
  */
+@ExtensionPoint
 public interface AggregateCombiner<T> extends ColumnValueSelector<T>
 {
   /**
diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java
index 22ec0724719..6a5515fefc4 100644
--- a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java
@@ -37,6 +37,8 @@
  * AggregatorFactory is a strategy (in the terms of Design Patterns) that represents column aggregation, e. g. min,
  * max, sum of metric columns, or cardinality of dimension columns (see {@link
  * io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory}).
+ * Implementations of {@link AggregatorFactory} which need to support nullable aggregations are encouraged
+ * to extend {@link NullableAggregatorFactory}.
  */
 @ExtensionPoint
 public abstract class AggregatorFactory implements Cacheable
@@ -60,7 +62,8 @@
    *
    * @return an object representing the combination of lhs and rhs, this can be a new object or a mutation of the inputs
    */
-  public abstract Object combine(Object lhs, Object rhs);
+  @Nullable
+  public abstract Object combine(@Nullable Object lhs, @Nullable Object rhs);
 
   /**
    * Creates an AggregateCombiner to fold rollup aggregation results from serveral "rows" of different indexes during
@@ -76,6 +79,20 @@ public AggregateCombiner makeAggregateCombiner()
     throw new UOE("[%s] does not implement makeAggregateCombiner()", this.getClass().getName());
   }
 
+  /**
+   * Creates an {@link AggregateCombiner} which supports nullability.
+   * Implementations of {@link AggregatorFactory} which need to Support Nullable Aggregations are encouraged
+   * to extend {@link NullableAggregatorFactory} instead of overriding this method.
+   * Default implementation calls {@link #makeAggregateCombiner()} for backwards compatibility.
+   *
+   * @see AggregateCombiner
+   * @see NullableAggregatorFactory
+   */
+  public AggregateCombiner makeNullableAggregateCombiner()
+  {
+    return makeAggregateCombiner();
+  }
+
   /**
    * Returns an AggregatorFactory that can be used to combine the output of aggregators from this factory.  This
    * generally amounts to simply creating a new factory that is the same as the current except with its input
@@ -127,7 +144,8 @@ public AggregatorFactory getMergingFactory(AggregatorFactory other) throws Aggre
    *
    * @return the finalized value that should be returned for the initial query
    */
-  public abstract Object finalizeComputation(Object object);
+  @Nullable
+  public abstract Object finalizeComputation(@Nullable Object object);
 
   public abstract String getName();
 
@@ -142,6 +160,19 @@ public AggregatorFactory getMergingFactory(AggregatorFactory other) throws Aggre
    */
   public abstract int getMaxIntermediateSize();
 
+  /**
+   * Returns the maximum size that this aggregator will require in bytes for intermediate storage of results.
+   * Implementations of {@link AggregatorFactory} which need to support nullable aggregations are encouraged
+   * to extend {@link NullableAggregatorFactory} instead of overriding this method.
+   * Default implementation calls {@link #getMaxIntermediateSize()} for backwards compatibility.
+   *
+   * @return the maximum number of bytes that an aggregator of this type will require for intermediate result storage.
+   */
+  public int getMaxIntermediateSizeWithNulls()
+  {
+    return getMaxIntermediateSize();
+  }
+
   /**
    * Return a potentially optimized form of this AggregatorFactory for per-segment queries.
    */
diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java
index 8f7910d275e..89d07e6d923 100644
--- a/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java
+++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java
@@ -174,8 +174,10 @@ public static BaseFloatColumnValueSelector makeColumnValueSelectorWithFloatDefau
         @Override
         public float getFloat()
         {
+          // Although baseSelector.getObject is nullable
+          // exprEval returned from Expression selectors is never null.
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull() ? nullValue : (float) exprEval.asDouble();
+          return exprEval.isNumericNull() ? nullValue : (float) exprEval.asDouble();
         }
 
         @Override
@@ -188,7 +190,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
         public boolean isNull()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull();
+          return exprEval == null || exprEval.isNumericNull();
         }
       }
       return new ExpressionFloatColumnSelector();
@@ -216,7 +218,7 @@ public static BaseLongColumnValueSelector makeColumnValueSelectorWithLongDefault
         public long getLong()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull() ? nullValue : exprEval.asLong();
+          return exprEval.isNumericNull() ? nullValue : exprEval.asLong();
         }
 
         @Override
@@ -229,7 +231,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
         public boolean isNull()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull();
+          return exprEval == null || exprEval.isNumericNull();
         }
       }
       return new ExpressionLongColumnSelector();
@@ -257,7 +259,7 @@ public static BaseDoubleColumnValueSelector makeColumnValueSelectorWithDoubleDef
         public double getDouble()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull() ? nullValue : exprEval.asDouble();
+          return exprEval.isNumericNull() ? nullValue : exprEval.asDouble();
         }
 
         @Override
@@ -270,7 +272,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
         public boolean isNull()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull();
+          return exprEval == null || exprEval.isNumericNull();
         }
       }
       return new ExpressionDoubleColumnSelector();
diff --git a/processing/src/main/java/io/druid/query/aggregation/BufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/BufferAggregator.java
index 86bd9dc54bf..7264f5c58a4 100644
--- a/processing/src/main/java/io/druid/query/aggregation/BufferAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/BufferAggregator.java
@@ -44,7 +44,7 @@
    *
    * <b>Implementations must not change the position, limit or mark of the given buffer</b>
    *
-   * This method must not exceed the number of bytes returned by {@link AggregatorFactory#getMaxIntermediateSize()}
+   * This method must not exceed the number of bytes returned by {@link AggregatorFactory#getMaxIntermediateSizeWithNulls}
    * in the corresponding {@link AggregatorFactory}
    *
    * @param buf byte buffer to initialize
diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java
index a61ddcfea45..83a079580e1 100644
--- a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseDoubleColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,39 @@ public DoubleMaxAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseDoubleColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new DoubleMaxAggregator(getDoubleColumnSelector(metricFactory, Double.NEGATIVE_INFINITY));
+    return getDoubleColumnSelector(
+        metricFactory,
+        Double.NEGATIVE_INFINITY
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
-    return new DoubleMaxBufferAggregator(getDoubleColumnSelector(metricFactory, Double.NEGATIVE_INFINITY));
+    return new DoubleMaxAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      BaseDoubleColumnValueSelector selector
+  )
+  {
+    return new DoubleMaxBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleMaxAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java
index 8b3ce13cba2..4d345e9658a 100644
--- a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseDoubleColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,39 @@ public DoubleMinAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseDoubleColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new DoubleMinAggregator(getDoubleColumnSelector(metricFactory, Double.POSITIVE_INFINITY));
+    return getDoubleColumnSelector(
+        metricFactory,
+        Double.POSITIVE_INFINITY
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
-    return new DoubleMinBufferAggregator(getDoubleColumnSelector(metricFactory, Double.POSITIVE_INFINITY));
+    return new DoubleMinAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      BaseDoubleColumnValueSelector selector
+  )
+  {
+    return new DoubleMinBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleMinAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java
index b9c2feff742..6ce68bb3f7a 100644
--- a/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseDoubleColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,39 @@ public DoubleSumAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseDoubleColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new DoubleSumAggregator(getDoubleColumnSelector(metricFactory, 0.0));
+    return getDoubleColumnSelector(
+        metricFactory,
+        0.0d
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
-    return new DoubleSumBufferAggregator(getDoubleColumnSelector(metricFactory, 0.0));
+    return new DoubleSumAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      BaseDoubleColumnValueSelector selector
+  )
+  {
+    return new DoubleSumBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleSumAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/FilteredAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FilteredAggregatorFactory.java
index c3dfdc4d750..f9f54a9a683 100644
--- a/processing/src/main/java/io/druid/query/aggregation/FilteredAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/FilteredAggregatorFactory.java
@@ -142,7 +142,7 @@ public String getTypeName()
   @Override
   public int getMaxIntermediateSize()
   {
-    return delegate.getMaxIntermediateSize();
+    return delegate.getMaxIntermediateSizeWithNulls();
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java
index 3c2737c316b..29ffab315d8 100644
--- a/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,39 @@ public FloatMaxAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseFloatColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new FloatMaxAggregator(getFloatColumnSelector(metricFactory, Float.NEGATIVE_INFINITY));
+    return getFloatColumnSelector(
+        metricFactory,
+        Float.NEGATIVE_INFINITY
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
-    return new FloatMaxBufferAggregator(getFloatColumnSelector(metricFactory, Float.NEGATIVE_INFINITY));
+    return new FloatMaxAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      BaseFloatColumnValueSelector selector
+  )
+  {
+    return new FloatMaxBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return FloatMaxAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java
index eca7da0ee2c..00ff2b5a8b5 100644
--- a/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,39 @@ public FloatMinAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseFloatColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new FloatMinAggregator(getFloatColumnSelector(metricFactory, Float.POSITIVE_INFINITY));
+    return getFloatColumnSelector(
+        metricFactory,
+        Float.POSITIVE_INFINITY
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
-    return new FloatMinBufferAggregator(getFloatColumnSelector(metricFactory, Float.POSITIVE_INFINITY));
+    return new FloatMinAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      BaseFloatColumnValueSelector selector
+  )
+  {
+    return new FloatMinBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return FloatMinAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java
index 38ecf7d46c0..feb29c36b1c 100644
--- a/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,39 @@ public FloatSumAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseFloatColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new FloatSumAggregator(getFloatColumnSelector(metricFactory, 0.0f));
+    return getFloatColumnSelector(
+        metricFactory,
+        0.0f
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
-    return new FloatSumBufferAggregator(getFloatColumnSelector(metricFactory, 0.0f));
+    return new FloatSumAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      BaseFloatColumnValueSelector selector
+  )
+  {
+    return new FloatSumBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return FloatSumAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java
index a2f16b9b075..4d7030e074f 100644
--- a/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,39 @@ public LongMaxAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseLongColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new LongMaxAggregator(getLongColumnSelector(metricFactory, Long.MIN_VALUE));
+    return getLongColumnSelector(
+        metricFactory,
+        Long.MIN_VALUE
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
-    return new LongMaxBufferAggregator(getLongColumnSelector(metricFactory, Long.MIN_VALUE));
+    return new LongMaxAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      BaseLongColumnValueSelector selector
+  )
+  {
+    return new LongMaxBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return LongMaxAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java
index 1992bd50a9d..15174f171f2 100644
--- a/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,36 @@ public LongMinAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseLongColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new LongMinAggregator(getLongColumnSelector(metricFactory, Long.MAX_VALUE));
+    return getLongColumnSelector(
+        metricFactory,
+        Long.MAX_VALUE
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  public Aggregator factorize(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
-    return new LongMinBufferAggregator(getLongColumnSelector(metricFactory, Long.MAX_VALUE));
+    return new LongMinAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
+    return new LongMinBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return LongMinAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java
index 1e36dd16dc1..285b1ef4af8 100644
--- a/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,20 +53,39 @@ public LongSumAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseLongColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new LongSumAggregator(getLongColumnSelector(metricFactory, 0L));
+    return getLongColumnSelector(
+        metricFactory,
+        0L
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
-    return new LongSumBufferAggregator(getLongColumnSelector(metricFactory, 0L));
+    return new LongSumAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      BaseLongColumnValueSelector selector
+  )
+  {
+    return new LongSumBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return LongSumAggregator.combineValues(lhs, rhs);
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/NullableAggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/NullableAggregateCombiner.java
new file mode 100644
index 00000000000..8c530585dc2
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/aggregation/NullableAggregateCombiner.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.aggregation;
+
+import io.druid.guice.annotations.PublicApi;
+import io.druid.segment.BaseNullableColumnValueSelector;
+import io.druid.segment.ColumnValueSelector;
+
+import javax.annotation.Nullable;
+
+/**
+ * The result of a NullableAggregateCombiner will be null if all the values to be combined are null values or no values
+ * are combined at all. If any of the values is non-null, the result will be the value of the delegate combiner.
+ * Note that the delegate combiner is not required to perform check for {@link BaseNullableColumnValueSelector#isNull()}
+ * on the selector as only non-null values will be passed to the delegate combiner.
+ * This class is only used when SQL compatible null handling is enabled.
+ */
+@PublicApi
+public final class NullableAggregateCombiner<T> implements AggregateCombiner<T>
+{
+  private boolean isNullResult = true;
+
+  private final AggregateCombiner<T> delegate;
+
+  public NullableAggregateCombiner(AggregateCombiner<T> delegate)
+  {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public void reset(ColumnValueSelector selector)
+  {
+    if (selector.isNull()) {
+      isNullResult = true;
+    } else {
+      isNullResult = false;
+      delegate.reset(selector);
+    }
+  }
+
+  @Override
+  public void fold(ColumnValueSelector selector)
+  {
+    boolean isNotNull = !selector.isNull();
+    if (isNotNull) {
+      if (isNullResult) {
+        isNullResult = false;
+        delegate.reset(selector);
+      } else {
+        delegate.fold(selector);
+      }
+    }
+  }
+
+  @Override
+  public float getFloat()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return primitive float for Null Value");
+    }
+    return delegate.getFloat();
+  }
+
+  @Override
+  public double getDouble()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return double for Null Value");
+    }
+    return delegate.getDouble();
+  }
+
+  @Override
+  public long getLong()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return long for Null Value");
+    }
+    return delegate.getLong();
+  }
+
+  @Override
+  public boolean isNull()
+  {
+    return isNullResult || delegate.isNull();
+  }
+
+  @Nullable
+  @Override
+  public T getObject()
+  {
+    return isNullResult ? null : delegate.getObject();
+  }
+
+  @Override
+  public Class classOfObject()
+  {
+    return delegate.classOfObject();
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/aggregation/NullableAggregator.java b/processing/src/main/java/io/druid/query/aggregation/NullableAggregator.java
new file mode 100644
index 00000000000..3b3353da4bf
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/aggregation/NullableAggregator.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.aggregation;
+
+import io.druid.guice.annotations.PublicApi;
+import io.druid.segment.BaseNullableColumnValueSelector;
+
+import javax.annotation.Nullable;
+
+/**
+ * The result of a NullableAggregator will be null if all the values to be aggregated are null values
+ * or no values are aggregated at all. If any of the values is non-null, the result will be the aggregated
+ * value of the delegate aggregator. Note that the delegate aggregator is not required to perform check for
+ * {@link BaseNullableColumnValueSelector#isNull()} on the selector as only non-null values will be passed
+ * to the delegate aggregator. This class is only used when SQL compatible null handling is enabled.
+ */
+@PublicApi
+public final class NullableAggregator implements Aggregator
+{
+  private final Aggregator delegate;
+  private final BaseNullableColumnValueSelector selector;
+  private boolean isNullResult = true;
+
+  public NullableAggregator(Aggregator delegate, BaseNullableColumnValueSelector selector)
+  {
+    this.delegate = delegate;
+    this.selector = selector;
+  }
+
+  @Override
+  public void aggregate()
+  {
+    boolean isNotNull = !selector.isNull();
+    if (isNotNull) {
+      if (isNullResult) {
+        isNullResult = false;
+      }
+      delegate.aggregate();
+    }
+  }
+
+  @Override
+  @Nullable
+  public Object get()
+  {
+    if (isNullResult) {
+      return null;
+    }
+    return delegate.get();
+  }
+
+  @Override
+  public float getFloat()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return float for Null Value");
+    }
+    return delegate.getFloat();
+  }
+
+  @Override
+  public long getLong()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return long for Null Value");
+    }
+    return delegate.getLong();
+  }
+
+  @Override
+  public double getDouble()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return double for Null Value");
+    }
+    return delegate.getDouble();
+  }
+
+  @Override
+  public boolean isNull()
+  {
+    return isNullResult || delegate.isNull();
+  }
+
+  @Override
+  public void close()
+  {
+    delegate.close();
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/aggregation/NullableAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/NullableAggregatorFactory.java
new file mode 100644
index 00000000000..fdefd317c95
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/aggregation/NullableAggregatorFactory.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.aggregation;
+
+
+import io.druid.common.config.NullHandling;
+import io.druid.guice.annotations.ExtensionPoint;
+import io.druid.segment.BaseNullableColumnValueSelector;
+import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
+
+/**
+ * Abstract class with functionality to wrap {@link Aggregator}, {@link BufferAggregator} and {@link AggregateCombiner}
+ * to support nullable aggregations for SQL compatibility. Implementations of {@link AggregatorFactory} which need to
+ * support nullable aggregations are encouraged to extend this class.
+ */
+@ExtensionPoint
+public abstract class NullableAggregatorFactory<T extends BaseNullableColumnValueSelector> extends AggregatorFactory
+{
+  @Override
+  public final Aggregator factorize(ColumnSelectorFactory metricFactory)
+  {
+    T selector = selector(metricFactory);
+    Aggregator aggregator = factorize(metricFactory, selector);
+    return NullHandling.replaceWithDefault() ? aggregator : new NullableAggregator(aggregator, selector);
+  }
+
+  @Override
+  public final BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  {
+    T selector = selector(metricFactory);
+    BufferAggregator aggregator = factorizeBuffered(metricFactory, selector);
+    return NullHandling.replaceWithDefault() ? aggregator : new NullableBufferAggregator(aggregator, selector);
+  }
+
+  @Override
+  public final AggregateCombiner makeNullableAggregateCombiner()
+  {
+    AggregateCombiner combiner = makeAggregateCombiner();
+    return NullHandling.replaceWithDefault() ? combiner : new NullableAggregateCombiner(combiner);
+  }
+
+  @Override
+  public final int getMaxIntermediateSizeWithNulls()
+  {
+    return getMaxIntermediateSize() + (NullHandling.replaceWithDefault() ? 0 : Byte.BYTES);
+  }
+
+  // ---- ABSTRACT METHODS BELOW ------
+
+  /**
+   * Creates a {@link ColumnValueSelector} for the aggregated column.
+   *
+   * @see ColumnValueSelector
+   */
+  protected abstract T selector(ColumnSelectorFactory metricFactory);
+
+  /**
+   * Creates an {@link Aggregator} to aggregate values from several rows, by using the provided selector.
+   * @param metricFactory metricFactory
+   * @param selector {@link ColumnValueSelector} for the column to aggregate.
+   *
+   * @see Aggregator
+   */
+  protected abstract Aggregator factorize(ColumnSelectorFactory metricFactory, T selector);
+
+  /**
+   * Creates a {@link BufferAggregator} to aggregate values from several rows into a ByteBuffer.
+   * @param metricFactory metricFactory
+   * @param selector {@link ColumnValueSelector} for the column to aggregate.
+   *
+   * @see BufferAggregator
+   */
+  protected abstract BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      T selector
+  );
+}
diff --git a/processing/src/main/java/io/druid/query/aggregation/NullableBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/NullableBufferAggregator.java
new file mode 100644
index 00000000000..2c8e1a5618e
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/aggregation/NullableBufferAggregator.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.aggregation;
+
+import io.druid.common.config.NullHandling;
+import io.druid.guice.annotations.PublicApi;
+import io.druid.segment.BaseNullableColumnValueSelector;
+
+import javax.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+/**
+ * The result of a NullableBufferAggregator will be null if all the values to be aggregated are null values or no values
+ * are aggregated at all. If any of the values is non-null, the result will be the aggregated value of the delegate
+ * aggregator. Note that the delegate aggregator is not required to perform check for
+ * {@link BaseNullableColumnValueSelector#isNull()} on the selector as only non-null values will be passed to the
+ * delegate aggregator. This class is only used when SQL compatible null handling is enabled.
+ * When writing aggregated result to buffer, it will write an additional byte to store the nullability of the
+ * aggregated result.
+ * Buffer Layout - 1 byte for storing nullability + delegate storage bytes.
+ */
+@PublicApi
+public final class NullableBufferAggregator implements BufferAggregator
+{
+
+  private final BufferAggregator delegate;
+  private final BaseNullableColumnValueSelector selector;
+
+  public NullableBufferAggregator(BufferAggregator delegate, BaseNullableColumnValueSelector selector)
+  {
+    this.delegate = delegate;
+    this.selector = selector;
+  }
+
+  @Override
+  public void init(ByteBuffer buf, int position)
+  {
+    buf.put(position, NullHandling.IS_NULL_BYTE);
+    delegate.init(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public void aggregate(ByteBuffer buf, int position)
+  {
+    boolean isNotNull = !selector.isNull();
+    if (isNotNull) {
+      if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+        buf.put(position, NullHandling.IS_NOT_NULL_BYTE);
+      }
+      delegate.aggregate(buf, position + Byte.BYTES);
+    }
+  }
+
+  @Override
+  @Nullable
+  public Object get(ByteBuffer buf, int position)
+  {
+    if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+      return null;
+    }
+    return delegate.get(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public float getFloat(ByteBuffer buf, int position)
+  {
+    if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+      throw new IllegalStateException("Cannot return float for Null Value");
+    }
+    return delegate.getFloat(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public long getLong(ByteBuffer buf, int position)
+  {
+    if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+      throw new IllegalStateException("Cannot return long for Null Value");
+    }
+    return delegate.getLong(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public double getDouble(ByteBuffer buf, int position)
+  {
+    if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+      throw new IllegalStateException("Cannot return double for Null Value");
+    }
+    return delegate.getDouble(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public boolean isNull(ByteBuffer buf, int position)
+  {
+    return buf.get(position) == NullHandling.IS_NULL_BYTE || delegate.isNull(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public void close()
+  {
+    delegate.close();
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java
index f6140b282c1..b089f7fff3c 100644
--- a/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java
@@ -22,6 +22,7 @@
 import io.druid.guice.annotations.ExtensionPoint;
 import io.druid.java.util.common.Cacheable;
 
+import javax.annotation.Nullable;
 import java.util.Comparator;
 import java.util.Map;
 import java.util.Set;
@@ -36,6 +37,7 @@
 
   Comparator getComparator();
 
+  @Nullable
   Object compute(Map<String, Object> combinedAggregators);
 
   String getName();
diff --git a/processing/src/main/java/io/druid/query/aggregation/SimpleDoubleAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
index 933a2575e47..a9add73aadd 100644
--- a/processing/src/main/java/io/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
@@ -28,12 +28,13 @@
 import io.druid.segment.ColumnSelectorFactory;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 
-public abstract class SimpleDoubleAggregatorFactory extends AggregatorFactory
+public abstract class SimpleDoubleAggregatorFactory extends NullableAggregatorFactory<BaseDoubleColumnValueSelector>
 {
   protected final String name;
   protected final String fieldName;
@@ -103,7 +104,8 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
     return object;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/SimpleFloatAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/SimpleFloatAggregatorFactory.java
index ce03a9947dc..919a31425ad 100644
--- a/processing/src/main/java/io/druid/query/aggregation/SimpleFloatAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/SimpleFloatAggregatorFactory.java
@@ -27,12 +27,13 @@
 import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 
-public abstract class SimpleFloatAggregatorFactory extends AggregatorFactory
+public abstract class SimpleFloatAggregatorFactory extends NullableAggregatorFactory<BaseFloatColumnValueSelector>
 {
   protected final String name;
   protected final String fieldName;
@@ -97,7 +98,8 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
     return object;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/SimpleLongAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/SimpleLongAggregatorFactory.java
index c35a0b2a6c5..cd66fcab215 100644
--- a/processing/src/main/java/io/druid/query/aggregation/SimpleLongAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/SimpleLongAggregatorFactory.java
@@ -27,12 +27,13 @@
 import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 
-public abstract class SimpleLongAggregatorFactory extends AggregatorFactory
+public abstract class SimpleLongAggregatorFactory extends NullableAggregatorFactory<BaseLongColumnValueSelector>
 {
   protected final String name;
   protected final String fieldName;
@@ -93,7 +94,8 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
     return object;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java
index 47bf800c35a..b62c48aba1e 100644
--- a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java
@@ -20,23 +20,33 @@
 package io.druid.query.aggregation.cardinality.types;
 
 import com.google.common.hash.Hasher;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HyperLogLogCollector;
 import io.druid.query.aggregation.cardinality.CardinalityAggregator;
 import io.druid.segment.BaseDoubleColumnValueSelector;
 
-
+/**
+ * If performance of this class appears to be a bottleneck for somebody,
+ * one simple way to improve it is to split it into two different classes,
+ * one that is used when {@link NullHandling#replaceWithDefault()} is false,
+ * and one - when it's true, moving this computation out of the tight loop
+ */
 public class DoubleCardinalityAggregatorColumnSelectorStrategy
     implements CardinalityAggregatorColumnSelectorStrategy<BaseDoubleColumnValueSelector>
 {
   @Override
-  public void hashRow(BaseDoubleColumnValueSelector dimSelector, Hasher hasher)
+  public void hashRow(BaseDoubleColumnValueSelector selector, Hasher hasher)
   {
-    hasher.putDouble(dimSelector.getDouble());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      hasher.putDouble(selector.getDouble());
+    }
   }
 
   @Override
-  public void hashValues(BaseDoubleColumnValueSelector dimSelector, HyperLogLogCollector collector)
+  public void hashValues(BaseDoubleColumnValueSelector selector, HyperLogLogCollector collector)
   {
-    collector.add(CardinalityAggregator.hashFn.hashLong(Double.doubleToLongBits(dimSelector.getDouble())).asBytes());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      collector.add(CardinalityAggregator.hashFn.hashLong(Double.doubleToLongBits(selector.getDouble())).asBytes());
+    }
   }
 }
diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java
index d8865189669..de8160e3935 100644
--- a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java
@@ -20,22 +20,33 @@
 package io.druid.query.aggregation.cardinality.types;
 
 import com.google.common.hash.Hasher;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HyperLogLogCollector;
 import io.druid.query.aggregation.cardinality.CardinalityAggregator;
 import io.druid.segment.BaseFloatColumnValueSelector;
 
+/**
+ * If performance of this class appears to be a bottleneck for somebody,
+ * one simple way to improve it is to split it into two different classes,
+ * one that is used when {@link NullHandling#replaceWithDefault()} is false,
+ * and one - when it's true, moving this computation out of the tight loop
+ */
 public class FloatCardinalityAggregatorColumnSelectorStrategy
     implements CardinalityAggregatorColumnSelectorStrategy<BaseFloatColumnValueSelector>
 {
   @Override
   public void hashRow(BaseFloatColumnValueSelector selector, Hasher hasher)
   {
-    hasher.putFloat(selector.getFloat());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      hasher.putFloat(selector.getFloat());
+    }
   }
 
   @Override
   public void hashValues(BaseFloatColumnValueSelector selector, HyperLogLogCollector collector)
   {
-    collector.add(CardinalityAggregator.hashFn.hashInt(Float.floatToIntBits(selector.getFloat())).asBytes());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      collector.add(CardinalityAggregator.hashFn.hashInt(Float.floatToIntBits(selector.getFloat())).asBytes());
+    }
   }
 }
diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java
index b97e9103db1..a3d4e45177c 100644
--- a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java
@@ -20,22 +20,33 @@
 package io.druid.query.aggregation.cardinality.types;
 
 import com.google.common.hash.Hasher;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HyperLogLogCollector;
 import io.druid.query.aggregation.cardinality.CardinalityAggregator;
 import io.druid.segment.BaseLongColumnValueSelector;
 
+/**
+ * If performance of this class appears to be a bottleneck for somebody,
+ * one simple way to improve it is to split it into two different classes,
+ * one that is used when {@link NullHandling#replaceWithDefault()} is false,
+ * and one - when it's true, moving this computation out of the tight loop
+ */
 public class LongCardinalityAggregatorColumnSelectorStrategy
     implements CardinalityAggregatorColumnSelectorStrategy<BaseLongColumnValueSelector>
 {
   @Override
-  public void hashRow(BaseLongColumnValueSelector dimSelector, Hasher hasher)
+  public void hashRow(BaseLongColumnValueSelector selector, Hasher hasher)
   {
-    hasher.putLong(dimSelector.getLong());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      hasher.putLong(selector.getLong());
+    }
   }
 
   @Override
-  public void hashValues(BaseLongColumnValueSelector dimSelector, HyperLogLogCollector collector)
+  public void hashValues(BaseLongColumnValueSelector selector, HyperLogLogCollector collector)
   {
-    collector.add(CardinalityAggregator.hashFn.hashLong(dimSelector.getLong()).asBytes());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      collector.add(CardinalityAggregator.hashFn.hashLong(selector.getLong()).asBytes());
+    }
   }
 }
diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java
index 408d1cc1657..d7129470b5d 100644
--- a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java
@@ -20,6 +20,7 @@
 package io.druid.query.aggregation.cardinality.types;
 
 import com.google.common.hash.Hasher;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HyperLogLogCollector;
 import io.druid.query.aggregation.cardinality.CardinalityAggregator;
 import io.druid.segment.DimensionSelector;
@@ -40,20 +41,34 @@ public void hashRow(DimensionSelector dimSelector, Hasher hasher)
     // nothing to add to hasher if size == 0, only handle size == 1 and size != 0 cases.
     if (size == 1) {
       final String value = dimSelector.lookupName(row.get(0));
-      hasher.putUnencodedChars(nullToSpecial(value));
+      if (NullHandling.replaceWithDefault() || value != null) {
+        hasher.putUnencodedChars(nullToSpecial(value));
+      }
     } else if (size != 0) {
+      boolean hasNonNullValue = false;
       final String[] values = new String[size];
       for (int i = 0; i < size; ++i) {
         final String value = dimSelector.lookupName(row.get(i));
+        // SQL standard spec does not count null values,
+        // Skip counting null values when we are not replacing null with default value.
+        // A special value for null in case null handling is configured to use empty string for null.
+        if (NullHandling.sqlCompatible() && !hasNonNullValue && value != null) {
+          hasNonNullValue = true;
+        }
         values[i] = nullToSpecial(value);
       }
-      // Values need to be sorted to ensure consistent multi-value ordering across different segments
-      Arrays.sort(values);
-      for (int i = 0; i < size; ++i) {
-        if (i != 0) {
-          hasher.putChar(CARDINALITY_AGG_SEPARATOR);
+      // SQL standard spec does not count null values,
+      // Skip counting null values when we are not replacing null with default value.
+      // A special value for null in case null handling is configured to use empty string for null.
+      if (NullHandling.replaceWithDefault() || hasNonNullValue) {
+        // Values need to be sorted to ensure consistent multi-value ordering across different segments
+        Arrays.sort(values);
+        for (int i = 0; i < size; ++i) {
+          if (i != 0) {
+            hasher.putChar(CARDINALITY_AGG_SEPARATOR);
+          }
+          hasher.putUnencodedChars(values[i]);
         }
-        hasher.putUnencodedChars(values[i]);
       }
     }
   }
@@ -65,7 +80,12 @@ public void hashValues(DimensionSelector dimSelector, HyperLogLogCollector colle
     for (int i = 0, rowSize = row.size(); i < rowSize; i++) {
       int index = row.get(i);
       final String value = dimSelector.lookupName(index);
-      collector.add(CardinalityAggregator.hashFn.hashUnencodedChars(nullToSpecial(value)).asBytes());
+      // SQL standard spec does not count null values,
+      // Skip counting null values when we are not replacing null with default value.
+      // A special value for null in case null handling is configured to use empty string for null.
+      if (NullHandling.replaceWithDefault() || value != null) {
+        collector.add(CardinalityAggregator.hashFn.hashUnencodedChars(nullToSpecial(value)).asBytes());
+      }
     }
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java
index 7da5379199a..240b06e694c 100644
--- a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java
@@ -32,11 +32,13 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Collections;
@@ -45,7 +47,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class DoubleFirstAggregatorFactory extends AggregatorFactory
+public class DoubleFirstAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
   public static final Comparator VALUE_COMPARATOR = (o1, o2) -> Doubles.compare(
       ((SerializablePair<Long, Double>) o1).rhs,
@@ -76,20 +78,26 @@ public DoubleFirstAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new DoubleFirstAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new DoubleFirstBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -100,8 +108,15 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs;
   }
 
@@ -117,9 +132,8 @@ public AggregatorFactory getCombiningFactory()
     return new DoubleFirstAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new DoubleFirstAggregator(null, null)
         {
           @Override
@@ -135,9 +149,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new DoubleFirstBufferAggregator(null, null)
         {
           @Override
@@ -175,9 +188,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Double>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Double>) object).rhs;
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java
index 1ae510b675a..2f17b3d0cfd 100644
--- a/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java
@@ -32,11 +32,13 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Collections;
@@ -45,7 +47,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class FloatFirstAggregatorFactory extends AggregatorFactory
+public class FloatFirstAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
   public static final Comparator VALUE_COMPARATOR = (o1, o2) -> Doubles.compare(
       ((SerializablePair<Long, Float>) o1).rhs,
@@ -74,20 +76,26 @@ public FloatFirstAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new FloatFirstAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new FloatFirstBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -98,8 +106,15 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs;
   }
 
@@ -115,9 +130,8 @@ public AggregatorFactory getCombiningFactory()
     return new FloatFirstAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new FloatFirstAggregator(null, null)
         {
           @Override
@@ -133,9 +147,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new FloatFirstBufferAggregator(null, null)
         {
           @Override
@@ -173,9 +186,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Float>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Float>) object).rhs;
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java
index 32b575f4558..099da2577c4 100644
--- a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java
@@ -31,11 +31,13 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Collections;
@@ -43,7 +45,7 @@
 import java.util.List;
 import java.util.Map;
 
-public class LongFirstAggregatorFactory extends AggregatorFactory
+public class LongFirstAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
   public static final Comparator VALUE_COMPARATOR = (o1, o2) -> Longs.compare(
       ((SerializablePair<Long, Long>) o1).rhs,
@@ -67,20 +69,26 @@ public LongFirstAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new LongFirstAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new LongFirstBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -91,8 +99,15 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (lhs == null) {
+      return rhs;
+    }
+    if (rhs == null) {
+      return lhs;
+    }
     return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs;
   }
 
@@ -108,9 +123,8 @@ public AggregatorFactory getCombiningFactory()
     return new LongFirstAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new LongFirstAggregator(null, null)
         {
           @Override
@@ -126,9 +140,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new LongFirstBufferAggregator(null, null)
         {
           @Override
@@ -166,9 +179,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Long>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Long>) object).rhs;
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/first/StringFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/StringFirstAggregatorFactory.java
index 187e8915481..71439596e25 100644
--- a/processing/src/main/java/io/druid/query/aggregation/first/StringFirstAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/first/StringFirstAggregatorFactory.java
@@ -29,8 +29,10 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.aggregation.SerializablePairLongString;
 import io.druid.query.cache.CacheKeyBuilder;
+import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 import io.druid.segment.column.Column;
 
@@ -42,7 +44,7 @@
 import java.util.Objects;
 
 @JsonTypeName("stringFirst")
-public class StringFirstAggregatorFactory extends AggregatorFactory
+public class StringFirstAggregatorFactory extends NullableAggregatorFactory<BaseObjectColumnValueSelector>
 {
   public static final int DEFAULT_MAX_STRING_SIZE = 1024;
 
@@ -107,21 +109,27 @@ public StringFirstAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseObjectColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  public Aggregator factorize(ColumnSelectorFactory metricFactory, BaseObjectColumnValueSelector selector)
   {
     return new StringFirstAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName),
+        selector,
         maxStringBytes
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseObjectColumnValueSelector selector)
   {
     return new StringFirstBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName),
+        selector,
         maxStringBytes
     );
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/first/StringFirstFoldingAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/StringFirstFoldingAggregatorFactory.java
index b268bafa5ae..1b129f59279 100644
--- a/processing/src/main/java/io/druid/query/aggregation/first/StringFirstFoldingAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/first/StringFirstFoldingAggregatorFactory.java
@@ -44,9 +44,8 @@ public StringFirstFoldingAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  public Aggregator factorize(ColumnSelectorFactory metricFactory, BaseObjectColumnValueSelector selector)
   {
-    final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(getName());
     return new StringFirstAggregator(null, null, maxStringBytes)
     {
       @Override
@@ -62,9 +61,8 @@ public void aggregate()
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseObjectColumnValueSelector selector)
   {
-    final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(getName());
     return new StringFirstBufferAggregator(null, null, maxStringBytes)
     {
       @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java
index 9cf9bf15580..e0854312bda 100644
--- a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java
@@ -30,13 +30,15 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory;
 import io.druid.query.aggregation.first.LongFirstAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Collections;
@@ -45,7 +47,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class DoubleLastAggregatorFactory extends AggregatorFactory
+public class DoubleLastAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
 
   private final String fieldName;
@@ -66,20 +68,26 @@ public DoubleLastAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new DoubleLastAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new DoubleLastBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -90,8 +98,15 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs;
   }
 
@@ -107,9 +122,8 @@ public AggregatorFactory getCombiningFactory()
     return new DoubleLastAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new DoubleLastAggregator(null, null)
         {
           @Override
@@ -125,9 +139,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new DoubleLastBufferAggregator(null, null)
         {
           @Override
@@ -165,9 +178,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Double>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Double>) object).rhs;
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java
index e06c3fcc842..c87804e224d 100644
--- a/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java
@@ -30,13 +30,15 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.aggregation.first.FloatFirstAggregatorFactory;
 import io.druid.query.aggregation.first.LongFirstAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Collections;
@@ -45,7 +47,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class FloatLastAggregatorFactory extends AggregatorFactory
+public class FloatLastAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
 
   private final String fieldName;
@@ -64,20 +66,26 @@ public FloatLastAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new FloatLastAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new FloatLastBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -88,8 +96,15 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return FloatFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs;
   }
 
@@ -105,9 +120,8 @@ public AggregatorFactory getCombiningFactory()
     return new FloatLastAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new FloatLastAggregator(null, null)
         {
           @Override
@@ -123,9 +137,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new FloatLastBufferAggregator(null, null)
         {
           @Override
@@ -163,9 +176,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Float>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Float>) object).rhs;
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java
index dc186a9f613..5101e3ceff8 100644
--- a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java
@@ -30,13 +30,15 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory;
 import io.druid.query.aggregation.first.LongFirstAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Collections;
@@ -45,7 +47,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class LongLastAggregatorFactory extends AggregatorFactory
+public class LongLastAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
   private final String fieldName;
   private final String name;
@@ -63,20 +65,26 @@ public LongLastAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new LongLastAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new LongLastBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -87,8 +95,15 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs;
   }
 
@@ -104,9 +119,8 @@ public AggregatorFactory getCombiningFactory()
     return new LongLastAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new LongLastAggregator(null, null)
         {
           @Override
@@ -122,9 +136,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new LongLastBufferAggregator(null, null)
         {
           @Override
@@ -162,9 +175,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Long>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Long>) object).rhs;
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/last/StringLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/StringLastAggregatorFactory.java
index cb4f3636612..25a9423be91 100644
--- a/processing/src/main/java/io/druid/query/aggregation/last/StringLastAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/last/StringLastAggregatorFactory.java
@@ -28,9 +28,11 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.aggregation.SerializablePairLongString;
 import io.druid.query.aggregation.first.StringFirstAggregatorFactory;
 import io.druid.query.cache.CacheKeyBuilder;
+import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 import io.druid.segment.column.Column;
 
@@ -43,7 +45,7 @@
 
 
 @JsonTypeName("stringLast")
-public class StringLastAggregatorFactory extends AggregatorFactory
+public class StringLastAggregatorFactory extends NullableAggregatorFactory<BaseObjectColumnValueSelector>
 {
   private final String fieldName;
   private final String name;
@@ -66,21 +68,27 @@ public StringLastAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseObjectColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  public Aggregator factorize(ColumnSelectorFactory metricFactory, BaseObjectColumnValueSelector selector)
   {
     return new StringLastAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName),
+        selector,
         maxStringBytes
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseObjectColumnValueSelector selector)
   {
     return new StringLastBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName),
+        selector,
         maxStringBytes
     );
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/last/StringLastFoldingAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/StringLastFoldingAggregatorFactory.java
index 9bd6a64488e..73d1d8481d5 100644
--- a/processing/src/main/java/io/druid/query/aggregation/last/StringLastFoldingAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/last/StringLastFoldingAggregatorFactory.java
@@ -44,9 +44,8 @@ public StringLastFoldingAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  public Aggregator factorize(ColumnSelectorFactory metricFactory, BaseObjectColumnValueSelector selector)
   {
-    final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(getName());
     return new StringLastAggregator(null, null, maxStringBytes)
     {
       @Override
@@ -62,9 +61,8 @@ public void aggregate()
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseObjectColumnValueSelector selector)
   {
-    final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(getName());
     return new StringLastBufferAggregator(null, null, maxStringBytes)
     {
       @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java
index 4ef0f499634..4eabedfa05c 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java
@@ -24,6 +24,7 @@
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.IAE;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -109,11 +110,21 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    double retVal = 0.0;
+    Double retVal = NullHandling.defaultDoubleValue();
     if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).doubleValue();
+      Number nextVal = (Number) fieldsIter.next().compute(values);
+      if (nextVal == null) {
+        // As per the SQL standard, if any of the values is null, arithmetic operators will return null.
+        return null;
+      }
+      retVal = nextVal.doubleValue();
       while (fieldsIter.hasNext()) {
-        retVal = op.compute(retVal, ((Number) fieldsIter.next().compute(values)).doubleValue());
+        nextVal = (Number) fieldsIter.next().compute(values);
+        if (nextVal == null) {
+          // As per the SQL standard, if any of the values is null, arithmetic operators will return null.
+          return null;
+        }
+        retVal = op.compute(retVal, (nextVal).doubleValue());
       }
     }
     return retVal;
@@ -268,7 +279,7 @@ static Ops lookup(String fn)
     /**
      * Ensures the following order: numeric > NaN > Infinite.
      *
-     * The name may be referenced via Ordering.valueOf(String) in the constructor {@link
+     * The name may be referenced via {@link #valueOf(String)} in the constructor {@link
      * ArithmeticPostAggregator#ArithmeticPostAggregator(String, String, List, String)}.
      */
     @SuppressWarnings("unused")
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/DoubleGreatestPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/DoubleGreatestPostAggregator.java
index bc2e8177061..32568e6d6f7 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/DoubleGreatestPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/DoubleGreatestPostAggregator.java
@@ -23,6 +23,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -37,14 +38,9 @@
 
 public class DoubleGreatestPostAggregator implements PostAggregator
 {
-  private static final Comparator COMPARATOR = new Comparator()
-  {
-    @Override
-    public int compare(Object o, Object o1)
-    {
-      return ((Double) o).compareTo((Double) o1);
-    }
-  };
+  private static final Comparator<Number> COMPARATOR = Comparator.nullsFirst(
+      Comparator.comparingDouble(Number::doubleValue)
+  );
 
   private final String name;
   private final List<PostAggregator> fields;
@@ -81,13 +77,15 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    double retVal = Double.NEGATIVE_INFINITY;
-    if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).doubleValue();
-      while (fieldsIter.hasNext()) {
-        double other = ((Number) fieldsIter.next().compute(values)).doubleValue();
-        if (other > retVal) {
-          retVal = other;
+    Double retVal = NullHandling.replaceWithDefault() ? Double.NEGATIVE_INFINITY : null;
+    while (fieldsIter.hasNext()) {
+      Number nextVal = ((Number) fieldsIter.next().compute(values));
+      // Ignore NULL values and return the greatest out of non-null values.
+      if (nextVal != null && (retVal == null || COMPARATOR.compare(nextVal, retVal) > 0)) {
+        if (nextVal instanceof Double) {
+          retVal = (Double) nextVal;
+        } else {
+          retVal = nextVal.doubleValue();
         }
       }
     }
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/DoubleLeastPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/DoubleLeastPostAggregator.java
index ec5f806a95a..0617a47db16 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/DoubleLeastPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/DoubleLeastPostAggregator.java
@@ -23,6 +23,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -37,14 +38,9 @@
 
 public class DoubleLeastPostAggregator implements PostAggregator
 {
-  private static final Comparator COMPARATOR = new Comparator()
-  {
-    @Override
-    public int compare(Object o, Object o1)
-    {
-      return ((Double) o).compareTo((Double) o1);
-    }
-  };
+  private static final Comparator<Number> COMPARATOR = Comparator.nullsLast(
+      Comparator.comparingDouble(Number::doubleValue)
+  );
 
   private final String name;
   private final List<PostAggregator> fields;
@@ -81,13 +77,15 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    double retVal = Double.POSITIVE_INFINITY;
-    if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).doubleValue();
-      while (fieldsIter.hasNext()) {
-        double other = ((Number) fieldsIter.next().compute(values)).doubleValue();
-        if (other < retVal) {
-          retVal = other;
+    Double retVal = NullHandling.replaceWithDefault() ? Double.POSITIVE_INFINITY : null;
+    while (fieldsIter.hasNext()) {
+      Number nextVal = ((Number) fieldsIter.next().compute(values));
+      // Ignore NULL values and return the least out of non-null values.
+      if (nextVal != null && (retVal == null || COMPARATOR.compare(nextVal, retVal) < 0)) {
+        if (nextVal instanceof Double) {
+          retVal = (Double) nextVal;
+        } else {
+          retVal = nextVal.doubleValue();
         }
       }
     }
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/LongGreatestPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/LongGreatestPostAggregator.java
index 06e25b90f09..5f38b4e7a28 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/LongGreatestPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/LongGreatestPostAggregator.java
@@ -23,7 +23,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
-import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -38,14 +38,9 @@
 
 public class LongGreatestPostAggregator implements PostAggregator
 {
-  private static final Comparator COMPARATOR = new Comparator()
-  {
-    @Override
-    public int compare(Object o, Object o1)
-    {
-      return Longs.compare(((Number) o).longValue(), ((Number) o1).longValue());
-    }
-  };
+  private static final Comparator<Number> COMPARATOR = Comparator.nullsFirst(
+      Comparator.comparingLong(Number::longValue)
+  );
 
   private final String name;
   private final List<PostAggregator> fields;
@@ -82,13 +77,15 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    long retVal = Long.MIN_VALUE;
-    if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).longValue();
-      while (fieldsIter.hasNext()) {
-        long other = ((Number) fieldsIter.next().compute(values)).longValue();
-        if (other > retVal) {
-          retVal = other;
+    Long retVal = NullHandling.replaceWithDefault() ? Long.MIN_VALUE : null;
+    while (fieldsIter.hasNext()) {
+      Number nextVal = ((Number) fieldsIter.next().compute(values));
+      // Ignore NULL values and return the greatest out of non-null values.
+      if (nextVal != null && (retVal == null || COMPARATOR.compare(nextVal, retVal) > 0)) {
+        if (nextVal instanceof Long) {
+          retVal = (Long) nextVal;
+        } else {
+          retVal = nextVal.longValue();
         }
       }
     }
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/LongLeastPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/LongLeastPostAggregator.java
index 466995fbaad..e00df00a937 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/LongLeastPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/LongLeastPostAggregator.java
@@ -23,7 +23,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
-import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -38,14 +38,9 @@
 
 public class LongLeastPostAggregator implements PostAggregator
 {
-  private static final Comparator COMPARATOR = new Comparator()
-  {
-    @Override
-    public int compare(Object o, Object o1)
-    {
-      return Longs.compare(((Number) o).longValue(), ((Number) o1).longValue());
-    }
-  };
+  private static final Comparator<Number> COMPARATOR = Comparator.nullsLast(
+      Comparator.comparingLong(Number::longValue)
+  );
 
   private final String name;
   private final List<PostAggregator> fields;
@@ -82,13 +77,15 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    long retVal = Long.MAX_VALUE;
-    if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).longValue();
-      while (fieldsIter.hasNext()) {
-        long other = ((Number) fieldsIter.next().compute(values)).longValue();
-        if (other < retVal) {
-          retVal = other;
+    Long retVal = NullHandling.replaceWithDefault() ? Long.MAX_VALUE : null;
+    while (fieldsIter.hasNext()) {
+      Number nextVal = ((Number) fieldsIter.next().compute(values));
+      // Ignore NULL values and return the least out of non-null values.
+      if (nextVal != null && (retVal == null || COMPARATOR.compare(nextVal, retVal) < 0)) {
+        if (nextVal instanceof Long) {
+          retVal = (Long) nextVal;
+        } else {
+          retVal = nextVal.longValue();
         }
       }
     }
diff --git a/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java
index 1d69092ac5b..f5c8714c26e 100644
--- a/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java
+++ b/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java
@@ -23,7 +23,7 @@
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
 import com.google.common.base.Predicates;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.filter.DimFilterUtils;
 import io.druid.segment.DimensionSelector;
@@ -106,7 +106,7 @@ private DimensionSelector filterWhiteList(DimensionSelector selector)
       }
     } else {
       for (int i = 0; i < selectorCardinality; i++) {
-        if (values.contains(Strings.nullToEmpty(selector.lookupName(i)))) {
+        if (values.contains(NullHandling.nullToEmptyIfNeeded(selector.lookupName(i)))) {
           forwardMapping.put(i, count);
           reverseMapping[count++] = i;
         }
@@ -137,7 +137,7 @@ public boolean apply(@Nullable String input)
     forwardMapping.defaultReturnValue(-1);
     final int[] reverseMapping = new int[maxPossibleFilteredCardinality];
     for (int i = 0; i < selectorCardinality; i++) {
-      if (!values.contains(Strings.nullToEmpty(selector.lookupName(i)))) {
+      if (!values.contains(NullHandling.nullToEmptyIfNeeded(selector.lookupName(i)))) {
         forwardMapping.put(i, count);
         reverseMapping[count++] = i;
       }
diff --git a/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java
index ad7fe665fa3..010719f4908 100644
--- a/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java
+++ b/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.filter.DimFilterUtils;
 import io.druid.segment.DimensionSelector;
@@ -76,7 +76,7 @@ public DimensionSelector decorate(final DimensionSelector selector)
             @Override
             public boolean apply(@Nullable String input)
             {
-              return compiledRegex.matcher(Strings.nullToEmpty(input)).matches();
+              return compiledRegex.matcher(NullHandling.nullToEmptyIfNeeded(input)).matches();
             }
           }
       );
@@ -86,7 +86,8 @@ public boolean apply(@Nullable String input)
     final Int2IntOpenHashMap forwardMapping = new Int2IntOpenHashMap();
     forwardMapping.defaultReturnValue(-1);
     for (int i = 0; i < selectorCardinality; i++) {
-      if (compiledRegex.matcher(Strings.nullToEmpty(selector.lookupName(i))).matches()) {
+      String val = NullHandling.nullToEmptyIfNeeded(selector.lookupName(i));
+      if (val != null && compiledRegex.matcher(val).matches()) {
         forwardMapping.put(i, count++);
       }
     }
diff --git a/processing/src/main/java/io/druid/query/expression/ExprUtils.java b/processing/src/main/java/io/druid/query/expression/ExprUtils.java
index 57926813f58..f79ad2856f1 100644
--- a/processing/src/main/java/io/druid/query/expression/ExprUtils.java
+++ b/processing/src/main/java/io/druid/query/expression/ExprUtils.java
@@ -19,6 +19,7 @@
 
 package io.druid.query.expression;
 
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.IAE;
 import io.druid.java.util.common.granularity.PeriodGranularity;
@@ -73,7 +74,14 @@ public static PeriodGranularity toPeriodGranularity(
     } else {
       Chronology chronology = timeZone == null ? ISOChronology.getInstanceUTC() : ISOChronology.getInstance(timeZone);
       final Object value = originArg.eval(bindings).value();
-      origin = value != null ? new DateTime(value, chronology) : null;
+      if (value instanceof String && NullHandling.isNullOrEquivalent((String) value)) {
+        // We get a blank string here, when sql compatible null handling is enabled
+        // and the expression contains an empty string for origin
+        // e.g timestamp_floor(\"__time\",'PT1M','','UTC')
+        origin = null;
+      } else {
+        origin = value != null ? new DateTime(value, chronology) : null;
+      }
     }
 
     return new PeriodGranularity(period, origin, timeZone);
diff --git a/processing/src/main/java/io/druid/query/expression/LikeExprMacro.java b/processing/src/main/java/io/druid/query/expression/LikeExprMacro.java
index 45433c66896..f13451d6d0c 100644
--- a/processing/src/main/java/io/druid/query/expression/LikeExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/LikeExprMacro.java
@@ -19,7 +19,7 @@
 
 package io.druid.query.expression;
 
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.IAE;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.ExprEval;
@@ -63,7 +63,7 @@ public Expr apply(final List<Expr> args)
     }
 
     final LikeDimFilter.LikeMatcher likeMatcher = LikeDimFilter.LikeMatcher.from(
-        Strings.nullToEmpty((String) patternExpr.getLiteralValue()),
+        NullHandling.nullToEmptyIfNeeded((String) patternExpr.getLiteralValue()),
         escapeChar
     );
 
diff --git a/processing/src/main/java/io/druid/query/expression/RegexpExtractExprMacro.java b/processing/src/main/java/io/druid/query/expression/RegexpExtractExprMacro.java
index 575319c10cd..4f48b01d473 100644
--- a/processing/src/main/java/io/druid/query/expression/RegexpExtractExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/RegexpExtractExprMacro.java
@@ -19,7 +19,7 @@
 
 package io.druid.query.expression;
 
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.IAE;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.ExprEval;
@@ -63,9 +63,10 @@ public Expr apply(final List<Expr> args)
       @Override
       public ExprEval eval(final ObjectBinding bindings)
       {
-        final Matcher matcher = pattern.matcher(Strings.nullToEmpty(arg.eval(bindings).asString()));
+        String s = arg.eval(bindings).asString();
+        final Matcher matcher = pattern.matcher(NullHandling.nullToEmptyIfNeeded(s));
         final String retVal = matcher.find() ? matcher.group(index) : null;
-        return ExprEval.of(Strings.emptyToNull(retVal));
+        return ExprEval.of(NullHandling.emptyToNullIfNeeded(retVal));
       }
 
       @Override
diff --git a/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java
index c34f4b39042..5ca01ffb5f2 100644
--- a/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java
@@ -67,6 +67,11 @@ public TimestampCeilExpr(final List<Expr> args)
     @Override
     public ExprEval eval(final ObjectBinding bindings)
     {
+      ExprEval eval = arg.eval(bindings);
+      if (eval.isNumericNull()) {
+        // Return null if the argument is null.
+        return ExprEval.of(null);
+      }
       return ExprEval.of(granularity.bucketEnd(DateTimes.utc(arg.eval(bindings).asLong())).getMillis());
     }
 
diff --git a/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java
index 5af980412e5..a865cc957c4 100644
--- a/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java
@@ -88,7 +88,12 @@ public Expr apply(final List<Expr> args)
       @Override
       public ExprEval eval(final ObjectBinding bindings)
       {
-        final DateTime dateTime = new DateTime(arg.eval(bindings).asLong(), chronology);
+        Object val = arg.eval(bindings).value();
+        if (val == null) {
+          // Return null if the argument is null.
+          return ExprEval.of(null);
+        }
+        final DateTime dateTime = new DateTime(val, chronology);
         switch (unit) {
           case EPOCH:
             return ExprEval.of(dateTime.getMillis() / 1000);
diff --git a/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java
index fdf827390da..54f29b99e8a 100644
--- a/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java
@@ -92,7 +92,12 @@ public PeriodGranularity getGranularity()
     @Override
     public ExprEval eval(final ObjectBinding bindings)
     {
-      return ExprEval.of(granularity.bucketStart(DateTimes.utc(arg.eval(bindings).asLong())).getMillis());
+      ExprEval eval = arg.eval(bindings);
+      if (eval.isNumericNull()) {
+        // Return null if the argument is null.
+        return ExprEval.of(null);
+      }
+      return ExprEval.of(granularity.bucketStart(DateTimes.utc(eval.asLong())).getMillis());
     }
 
     @Override
diff --git a/processing/src/main/java/io/druid/query/expression/TimestampFormatExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampFormatExprMacro.java
index b460c65c738..3f4f80d5837 100644
--- a/processing/src/main/java/io/druid/query/expression/TimestampFormatExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TimestampFormatExprMacro.java
@@ -74,6 +74,11 @@ public Expr apply(final List<Expr> args)
       @Override
       public ExprEval eval(final ObjectBinding bindings)
       {
+        ExprEval eval = arg.eval(bindings);
+        if (eval.isNumericNull()) {
+          // Return null if the argument is null.
+          return ExprEval.of(null);
+        }
         return ExprEval.of(formatter.print(arg.eval(bindings).asLong()));
       }
 
diff --git a/processing/src/main/java/io/druid/query/expression/TrimExprMacro.java b/processing/src/main/java/io/druid/query/expression/TrimExprMacro.java
index 88521af40e8..c828c6dda36 100644
--- a/processing/src/main/java/io/druid/query/expression/TrimExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TrimExprMacro.java
@@ -113,7 +113,7 @@ public ExprEval eval(final ObjectBinding bindings)
     {
       final ExprEval stringEval = stringExpr.eval(bindings);
 
-      if (chars.length == 0 || stringEval.isNull()) {
+      if (chars.length == 0 || stringEval.value() == null) {
         return stringEval;
       }
 
@@ -176,13 +176,13 @@ public ExprEval eval(final ObjectBinding bindings)
     {
       final ExprEval stringEval = stringExpr.eval(bindings);
 
-      if (stringEval.isNull()) {
+      if (stringEval.value() == null) {
         return stringEval;
       }
 
       final ExprEval charsEval = charsExpr.eval(bindings);
 
-      if (charsEval.isNull()) {
+      if (charsEval.value() == null) {
         return stringEval;
       }
 
diff --git a/processing/src/main/java/io/druid/query/extraction/FunctionalExtraction.java b/processing/src/main/java/io/druid/query/extraction/FunctionalExtraction.java
index eb1a8fa0edc..2d4d989559a 100644
--- a/processing/src/main/java/io/druid/query/extraction/FunctionalExtraction.java
+++ b/processing/src/main/java/io/druid/query/extraction/FunctionalExtraction.java
@@ -21,7 +21,7 @@
 
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 
 import javax.annotation.Nullable;
 
@@ -52,9 +52,9 @@ public FunctionalExtraction(
   )
   {
     this.retainMissingValue = retainMissingValue;
-    this.replaceMissingValueWith = Strings.emptyToNull(replaceMissingValueWith);
+    this.replaceMissingValueWith = NullHandling.emptyToNullIfNeeded(replaceMissingValueWith);
     Preconditions.checkArgument(
-        !(this.retainMissingValue && !Strings.isNullOrEmpty(this.replaceMissingValueWith)),
+        !(this.retainMissingValue && !(this.replaceMissingValueWith == null)),
         "Cannot specify a [replaceMissingValueWith] and set [retainMissingValue] to true"
     );
 
@@ -69,7 +69,7 @@ public FunctionalExtraction(
         public String apply(@Nullable String dimValue)
         {
           final String retval = extractionFunction.apply(dimValue);
-          return Strings.isNullOrEmpty(retval) ? Strings.emptyToNull(dimValue) : retval;
+          return NullHandling.isNullOrEquivalent(retval) ? NullHandling.emptyToNullIfNeeded(dimValue) : retval;
         }
       };
     } else {
@@ -79,8 +79,10 @@ public String apply(@Nullable String dimValue)
         @Override
         public String apply(@Nullable String dimValue)
         {
-          final String retval = extractionFunction.apply(dimValue);
-          return Strings.isNullOrEmpty(retval) ? FunctionalExtraction.this.replaceMissingValueWith : retval;
+          final String retval = NullHandling.emptyToNullIfNeeded(extractionFunction.apply(dimValue));
+          return retval == null
+                 ? FunctionalExtraction.this.replaceMissingValueWith
+                 : retval;
         }
       };
     }
diff --git a/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java
index 35d40e71564..e139361547b 100644
--- a/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java
@@ -19,7 +19,7 @@
 
 package io.druid.query.extraction;
 
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 
 import javax.annotation.Nullable;
 
@@ -42,14 +42,14 @@ private IdentityExtractionFn()
   @Nullable
   public String apply(@Nullable Object value)
   {
-    return value == null ? null : Strings.emptyToNull(value.toString());
+    return value == null ? null : NullHandling.emptyToNullIfNeeded(value.toString());
   }
 
   @Override
   @Nullable
   public String apply(@Nullable String value)
   {
-    return Strings.emptyToNull(value);
+    return NullHandling.emptyToNullIfNeeded(value);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java
index 47424fce919..d006ead88a8 100644
--- a/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java
@@ -24,7 +24,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.js.JavaScriptConfig;
 import org.mozilla.javascript.Context;
@@ -113,7 +113,7 @@ public boolean isInjective()
   public String apply(@Nullable Object value)
   {
     checkAndCompileScript();
-    return Strings.emptyToNull(fn.apply(value));
+    return NullHandling.emptyToNullIfNeeded(fn.apply(value));
   }
 
   /**
@@ -139,7 +139,7 @@ private void checkAndCompileScript()
   @Nullable
   public String apply(@Nullable String value)
   {
-    return this.apply((Object) Strings.emptyToNull(value));
+    return this.apply((Object) NullHandling.emptyToNullIfNeeded(value));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java
index 109d5f79242..831aff75edf 100644
--- a/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java
@@ -21,7 +21,7 @@
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -52,7 +52,7 @@ public LowerExtractionFn(@JsonProperty("locale") String localeString)
   @Override
   public String apply(@Nullable String key)
   {
-    if (Strings.isNullOrEmpty(key)) {
+    if (NullHandling.isNullOrEquivalent(key)) {
       return null;
     }
     return key.toLowerCase(locale);
@@ -73,7 +73,7 @@ public ExtractionType getExtractionType()
   @Override
   public byte[] getCacheKey()
   {
-    byte[] localeBytes = StringUtils.toUtf8(Strings.nullToEmpty(localeString));
+    byte[] localeBytes = StringUtils.toUtf8(StringUtils.nullToEmptyNonDruidDataString(localeString));
     return ByteBuffer.allocate(2 + localeBytes.length)
                      .put(ExtractionCacheHelper.CACHE_TYPE_ID_LOWER)
                      .put((byte) 0XFF)
diff --git a/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java b/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java
index f2ec3474937..a90db2990d7 100644
--- a/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java
+++ b/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java
@@ -23,21 +23,20 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
 import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.lookup.LookupExtractor;
 
 import javax.annotation.Nullable;
-import javax.validation.constraints.NotNull;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 @JsonTypeName("map")
 public class MapLookupExtractor extends LookupExtractor
@@ -64,22 +63,32 @@ public MapLookupExtractor(
 
   @Nullable
   @Override
-  public String apply(@NotNull String val)
+  public String apply(@Nullable String key)
   {
-    return map.get(val);
+    String keyEquivalent = NullHandling.nullToEmptyIfNeeded(key);
+    if (keyEquivalent == null) {
+      // keyEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      return null;
+    }
+    return NullHandling.emptyToNullIfNeeded(map.get(keyEquivalent));
   }
 
   @Override
-  public List<String> unapply(final String value)
+  public List<String> unapply(@Nullable final String value)
   {
-    return Lists.newArrayList(Maps.filterKeys(map, new Predicate<String>()
-    {
-      @Override public boolean apply(@Nullable String key)
-      {
-        return map.get(key).equals(Strings.nullToEmpty(value));
-      }
-    }).keySet());
-
+    String valueToLookup = NullHandling.nullToEmptyIfNeeded(value);
+    if (valueToLookup == null) {
+      // valueToLookup is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      // null value maps to empty list when SQL Compatible
+      return Collections.emptyList();
+    }
+    return map.entrySet()
+              .stream()
+              .filter(entry -> entry.getValue().equals(valueToLookup))
+              .map(entry -> entry.getKey())
+              .collect(Collectors.toList());
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java
index aa37175a616..82657f2670d 100644
--- a/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -62,7 +62,7 @@ public MatchingDimExtractionFn(
   @Override
   public String apply(@Nullable String dimValue)
   {
-    if (Strings.isNullOrEmpty(dimValue)) {
+    if (NullHandling.isNullOrEquivalent(dimValue)) {
       // We'd return null whether or not the pattern matched
       return null;
     }
diff --git a/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java
index 4a1549ba52e..79977ffb7d6 100644
--- a/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java
@@ -22,8 +22,8 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.primitives.Ints;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -106,13 +106,14 @@ public RegexDimExtractionFn(
   public String apply(@Nullable String dimValue)
   {
     final String retVal;
-    final Matcher matcher = pattern.matcher(Strings.nullToEmpty(dimValue));
-    if (matcher.find()) {
+    String val = NullHandling.nullToEmptyIfNeeded(dimValue);
+    final Matcher matcher = val == null ? null : pattern.matcher(val);
+    if (matcher != null && matcher.find()) {
       retVal = matcher.group(index);
     } else {
       retVal = replaceMissingValue ? replaceMissingValueWith : dimValue;
     }
-    return Strings.emptyToNull(retVal);
+    return NullHandling.emptyToNullIfNeeded(retVal);
   }
 
   @JsonProperty("expr")
diff --git a/processing/src/main/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFn.java
index 51c742d0199..4c5555d370a 100644
--- a/processing/src/main/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFn.java
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.query.search.SearchQuerySpec;
 
 import javax.annotation.Nullable;
@@ -64,7 +64,7 @@ public SearchQuerySpec getSearchQuerySpec()
   @Override
   public String apply(@Nullable String dimValue)
   {
-    return searchQuerySpec.accept(dimValue) ? Strings.emptyToNull(dimValue) : null;
+    return searchQuerySpec.accept(dimValue) ? NullHandling.emptyToNullIfNeeded(dimValue) : null;
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java
index ab00338aa98..1bc7ca3fdd5 100644
--- a/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java
@@ -107,7 +107,7 @@ public String apply(@Nullable String value)
         value = "";
       }
     }
-    return Strings.emptyToNull(StringUtils.format(format, value));
+    return io.druid.common.config.NullHandling.emptyToNullIfNeeded(StringUtils.format(format, value));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/StrlenExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/StrlenExtractionFn.java
index 395afd7cc84..063441485b0 100644
--- a/processing/src/main/java/io/druid/query/extraction/StrlenExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/StrlenExtractionFn.java
@@ -20,6 +20,7 @@
 package io.druid.query.extraction;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
+import io.druid.common.config.NullHandling;
 
 import javax.annotation.Nullable;
 
@@ -38,8 +39,12 @@ public static StrlenExtractionFn instance()
   }
 
   @Override
+  @Nullable
   public String apply(@Nullable String value)
   {
+    if (NullHandling.sqlCompatible() && value == null) {
+      return null;
+    }
     return String.valueOf(value == null ? 0 : value.length());
   }
 
diff --git a/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java
index 710a4352365..5813ceb705c 100644
--- a/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -63,7 +63,7 @@ public SubstringDimExtractionFn(
   @Override
   public String apply(@Nullable String dimValue)
   {
-    if (Strings.isNullOrEmpty(dimValue)) {
+    if (NullHandling.isNullOrEquivalent(dimValue)) {
       return null;
     }
 
diff --git a/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java
index 52e443a1ea7..3e4f2534a66 100644
--- a/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java
@@ -22,8 +22,8 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.ibm.icu.text.SimpleDateFormat;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.StringUtils;
 import org.joda.time.DateTime;
@@ -120,7 +120,7 @@ public TimeDimExtractionFn(
   @Override
   public String apply(@Nullable String dimValue)
   {
-    if (Strings.isNullOrEmpty(dimValue)) {
+    if (NullHandling.isNullOrEquivalent(dimValue)) {
       return null;
     }
 
diff --git a/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java
index 97468ccf7c7..5814a44bb62 100644
--- a/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java
@@ -21,7 +21,7 @@
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -51,7 +51,7 @@ public UpperExtractionFn(@JsonProperty("locale") String localeString)
   @Override
   public String apply(@Nullable String key)
   {
-    if (Strings.isNullOrEmpty(key)) {
+    if (NullHandling.isNullOrEquivalent(key)) {
       return null;
     }
     return key.toUpperCase(locale);
@@ -72,7 +72,7 @@ public ExtractionType getExtractionType()
   @Override
   public byte[] getCacheKey()
   {
-    byte[] localeBytes = StringUtils.toUtf8(Strings.nullToEmpty(localeString));
+    byte[] localeBytes = StringUtils.toUtf8(StringUtils.nullToEmptyNonDruidDataString(localeString));
     return ByteBuffer.allocate(2 + localeBytes.length)
                      .put(ExtractionCacheHelper.CACHE_TYPE_ID_UPPER)
                      .put((byte) 0XFF)
diff --git a/processing/src/main/java/io/druid/query/filter/DoubleValueMatcherColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/filter/DoubleValueMatcherColumnSelectorStrategy.java
index 342db6cbcdd..c1be4c57315 100644
--- a/processing/src/main/java/io/druid/query/filter/DoubleValueMatcherColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/filter/DoubleValueMatcherColumnSelectorStrategy.java
@@ -22,7 +22,6 @@
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
 import io.druid.segment.BaseDoubleColumnValueSelector;
 import io.druid.segment.DimensionHandlerUtils;
-import io.druid.segment.filter.BooleanValueMatcher;
 
 
 public class DoubleValueMatcherColumnSelectorStrategy
@@ -33,7 +32,7 @@ public ValueMatcher makeValueMatcher(final BaseDoubleColumnValueSelector selecto
   {
     final Double matchVal = DimensionHandlerUtils.convertObjectToDouble(value);
     if (matchVal == null) {
-      return BooleanValueMatcher.of(false);
+      return ValueMatcher.nullValueMatcher(selector);
     }
 
     final long matchValLongBits = Double.doubleToLongBits(matchVal);
@@ -65,6 +64,9 @@ public ValueMatcher makeValueMatcher(
       @Override
       public boolean matches()
       {
+        if (selector.isNull()) {
+          return predicate.applyNull();
+        }
         return predicate.applyDouble(selector.getDouble());
       }
 
@@ -80,13 +82,11 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   @Override
   public ValueGetter makeValueGetter(final BaseDoubleColumnValueSelector selector)
   {
-    return new ValueGetter()
-    {
-      @Override
-      public String[] get()
-      {
-        return new String[]{Double.toString(selector.getDouble())};
+    return () -> {
+      if (selector.isNull()) {
+        return null;
       }
+      return new String[]{Double.toString(selector.getDouble())};
     };
   }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/DruidDoublePredicate.java b/processing/src/main/java/io/druid/query/filter/DruidDoublePredicate.java
index 2844f2e5bde..4c6a0f69208 100644
--- a/processing/src/main/java/io/druid/query/filter/DruidDoublePredicate.java
+++ b/processing/src/main/java/io/druid/query/filter/DruidDoublePredicate.java
@@ -19,12 +19,38 @@
 
 package io.druid.query.filter;
 
-
+/**
+ * Note: this is not a {@link io.druid.guice.annotations.PublicApi} or an
+ * {@link io.druid.guice.annotations.ExtensionPoint} of Druid.
+ */
+// All implementations are currently lambda expressions and IntelliJ inspections wrongly complain about unused
+// variables. SuppressWarnings can be removed once https://youtrack.jetbrains.com/issue/IDEA-191743 is resolved.
+@SuppressWarnings("unused")
 public interface DruidDoublePredicate
 {
   DruidDoublePredicate ALWAYS_FALSE = input -> false;
 
   DruidDoublePredicate ALWAYS_TRUE = input -> true;
 
+  DruidDoublePredicate MATCH_NULL_ONLY = new DruidDoublePredicate()
+  {
+    @Override
+    public boolean applyDouble(double input)
+    {
+      return false;
+    }
+
+    @Override
+    public boolean applyNull()
+    {
+      return true;
+    }
+  };
+
   boolean applyDouble(double input);
+
+  default boolean applyNull()
+  {
+    return false;
+  }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/DruidFloatPredicate.java b/processing/src/main/java/io/druid/query/filter/DruidFloatPredicate.java
index 23d53944800..ead810410e6 100644
--- a/processing/src/main/java/io/druid/query/filter/DruidFloatPredicate.java
+++ b/processing/src/main/java/io/druid/query/filter/DruidFloatPredicate.java
@@ -20,11 +20,36 @@
 package io.druid.query.filter;
 
 /**
- * FloatPredicate is only supported in Java 8+, so use this to avoid boxing when a float predicate is needed.
+ * Note: this is not a {@link io.druid.guice.annotations.PublicApi} or an
+ * {@link io.druid.guice.annotations.ExtensionPoint} of Druid.
  */
+// All implementations are currently lambda expressions and IntelliJ inspections wrongly complain about unused
+// variables. SuppressWarnings can be removed once https://youtrack.jetbrains.com/issue/IDEA-191743 is resolved.
+@SuppressWarnings("unused")
 public interface DruidFloatPredicate
 {
   DruidFloatPredicate ALWAYS_FALSE = input -> false;
 
+  DruidFloatPredicate MATCH_NULL_ONLY = new DruidFloatPredicate()
+  {
+    @Override
+    public boolean applyFloat(float input)
+    {
+      return false;
+    }
+
+    @Override
+    public boolean applyNull()
+    {
+      return true;
+    }
+  };
+
+
   boolean applyFloat(float input);
+
+  default boolean applyNull()
+  {
+    return false;
+  }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/DruidLongPredicate.java b/processing/src/main/java/io/druid/query/filter/DruidLongPredicate.java
index 3a14e4a8aa1..60817cf42e5 100644
--- a/processing/src/main/java/io/druid/query/filter/DruidLongPredicate.java
+++ b/processing/src/main/java/io/druid/query/filter/DruidLongPredicate.java
@@ -20,7 +20,8 @@
 package io.druid.query.filter;
 
 /**
- * LongPredicate is only supported in Java 8+, so use this to avoid boxing when a long predicate is needed.
+ * Note: this is not a {@link io.druid.guice.annotations.PublicApi} or an
+ * {@link io.druid.guice.annotations.ExtensionPoint} of Druid.
  */
 public interface DruidLongPredicate
 {
@@ -28,5 +29,25 @@
 
   DruidLongPredicate ALWAYS_TRUE = input -> true;
 
+  DruidLongPredicate MATCH_NULL_ONLY = new DruidLongPredicate()
+  {
+    @Override
+    public boolean applyLong(long input)
+    {
+      return false;
+    }
+
+    @Override
+    public boolean applyNull()
+    {
+      return true;
+    }
+  };
+
   boolean applyLong(long input);
+
+  default boolean applyNull()
+  {
+    return false;
+  }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/FloatValueMatcherColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/filter/FloatValueMatcherColumnSelectorStrategy.java
index 94307c04626..c84be7b5738 100644
--- a/processing/src/main/java/io/druid/query/filter/FloatValueMatcherColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/filter/FloatValueMatcherColumnSelectorStrategy.java
@@ -22,7 +22,6 @@
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
 import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.DimensionHandlerUtils;
-import io.druid.segment.filter.BooleanValueMatcher;
 
 public class FloatValueMatcherColumnSelectorStrategy
     implements ValueMatcherColumnSelectorStrategy<BaseFloatColumnValueSelector>
@@ -32,7 +31,7 @@ public ValueMatcher makeValueMatcher(final BaseFloatColumnValueSelector selector
   {
     final Float matchVal = DimensionHandlerUtils.convertObjectToFloat(value);
     if (matchVal == null) {
-      return BooleanValueMatcher.of(false);
+      return ValueMatcher.nullValueMatcher(selector);
     }
 
     final int matchValIntBits = Float.floatToIntBits(matchVal);
@@ -64,6 +63,9 @@ public ValueMatcher makeValueMatcher(
       @Override
       public boolean matches()
       {
+        if (selector.isNull()) {
+          return predicate.applyNull();
+        }
         return predicate.applyFloat(selector.getFloat());
       }
 
@@ -79,13 +81,11 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   @Override
   public ValueGetter makeValueGetter(final BaseFloatColumnValueSelector selector)
   {
-    return new ValueGetter()
-    {
-      @Override
-      public String[] get()
-      {
-        return new String[]{Float.toString(selector.getFloat())};
+    return () -> {
+      if (selector.isNull()) {
+        return null;
       }
+      return new String[]{Float.toString(selector.getFloat())};
     };
   }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/InDimFilter.java b/processing/src/main/java/io/druid/query/filter/InDimFilter.java
index 1e97fbb94e2..aeba75db294 100644
--- a/processing/src/main/java/io/druid/query/filter/InDimFilter.java
+++ b/processing/src/main/java/io/druid/query/filter/InDimFilter.java
@@ -21,12 +21,9 @@
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Function;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.base.Supplier;
-import com.google.common.collect.ImmutableSortedSet;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
@@ -34,7 +31,10 @@
 import com.google.common.collect.TreeRangeSet;
 import com.google.common.primitives.Doubles;
 import com.google.common.primitives.Floats;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
+import io.druid.java.util.common.guava.Comparators;
+import io.druid.query.cache.CacheKeyBuilder;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.lookup.LookupExtractionFn;
 import io.druid.query.lookup.LookupExtractor;
@@ -45,7 +45,6 @@
 import it.unimi.dsi.fastutil.longs.LongArrayList;
 import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
 
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -53,6 +52,8 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
 
 public class InDimFilter implements DimFilter
 {
@@ -60,7 +61,8 @@
   // Hashing threshold is not applied to String for now, String still uses ImmutableSortedSet
   public static final int NUMERIC_HASHING_THRESHOLD = 16;
 
-  private final ImmutableSortedSet<String> values;
+  // Values can contain a `null` object
+  private final SortedSet<String> values;
   private final String dimension;
   private final ExtractionFn extractionFn;
   private final Supplier<DruidLongPredicate> longPredicateSupplier;
@@ -76,19 +78,11 @@ public InDimFilter(
   {
     Preconditions.checkNotNull(dimension, "dimension can not be null");
     Preconditions.checkArgument(values != null && !values.isEmpty(), "values can not be null or empty");
-    this.values = ImmutableSortedSet.copyOf(
-        Iterables.transform(
-            values, new Function<String, String>()
-            {
-              @Override
-              public String apply(String input)
-              {
-                return Strings.nullToEmpty(input);
-              }
 
-            }
-        )
-    );
+    this.values = new TreeSet<>(Comparators.naturalNullsFirst());
+    for (String value : values) {
+      this.values.add(NullHandling.emptyToNullIfNeeded(value));
+    }
     this.dimension = dimension;
     this.extractionFn = extractionFn;
     this.longPredicateSupplier = getLongPredicateSupplier();
@@ -117,31 +111,21 @@ public ExtractionFn getExtractionFn()
   @Override
   public byte[] getCacheKey()
   {
-    byte[] dimensionBytes = StringUtils.toUtf8(dimension);
-    final byte[][] valuesBytes = new byte[values.size()][];
-    int valuesBytesSize = 0;
-    int index = 0;
+    boolean hasNull = false;
     for (String value : values) {
-      valuesBytes[index] = StringUtils.toUtf8(Strings.nullToEmpty(value));
-      valuesBytesSize += valuesBytes[index].length + 1;
-      ++index;
-    }
-    byte[] extractionFnBytes = extractionFn == null ? new byte[0] : extractionFn.getCacheKey();
-
-    ByteBuffer filterCacheKey = ByteBuffer.allocate(3
-                                                    + dimensionBytes.length
-                                                    + valuesBytesSize
-                                                    + extractionFnBytes.length)
-                                          .put(DimFilterUtils.IN_CACHE_ID)
-                                          .put(dimensionBytes)
-                                          .put(DimFilterUtils.STRING_SEPARATOR)
-                                          .put(extractionFnBytes)
-                                          .put(DimFilterUtils.STRING_SEPARATOR);
-    for (byte[] bytes : valuesBytes) {
-      filterCacheKey.put(bytes)
-                    .put((byte) 0xFF);
+      if (value == null) {
+        hasNull = true;
+        break;
+      }
     }
-    return filterCacheKey.array();
+    return new CacheKeyBuilder(DimFilterUtils.IN_CACHE_ID)
+        .appendString(dimension)
+        .appendByte(DimFilterUtils.STRING_SEPARATOR)
+        .appendByteArray(extractionFn == null ? new byte[0] : extractionFn.getCacheKey())
+        .appendByte(DimFilterUtils.STRING_SEPARATOR)
+        .appendByte(hasNull ? NullHandling.IS_NULL_BYTE : NullHandling.IS_NOT_NULL_BYTE)
+        .appendByte(DimFilterUtils.STRING_SEPARATOR)
+        .appendStrings(values).build();
   }
 
   @Override
@@ -167,7 +151,7 @@ private InDimFilter optimizeLookup()
         // We cannot do an unapply()-based optimization if the selector value
         // and the replaceMissingValuesWith value are the same, since we have to match on
         // all values that are not present in the lookup.
-        final String convertedValue = Strings.emptyToNull(value);
+        final String convertedValue = NullHandling.emptyToNullIfNeeded(value);
         if (!exFn.isRetainMissingValue() && Objects.equals(convertedValue, exFn.getReplaceMissingValueWith())) {
           return this;
         }
@@ -177,7 +161,7 @@ private InDimFilter optimizeLookup()
         // there may be row values that match the selector value but are not included
         // in the lookup map. Match on the selector value as well.
         // If the selector value is overwritten in the lookup map, don't add selector value to keys.
-        if (exFn.isRetainMissingValue() && lookup.apply(convertedValue) == null) {
+        if (exFn.isRetainMissingValue() && NullHandling.isNullOrEquivalent(lookup.apply(convertedValue))) {
           keys.add(convertedValue);
         }
       }
@@ -212,7 +196,15 @@ public Filter toFilter()
     }
     RangeSet<String> retSet = TreeRangeSet.create();
     for (String value : values) {
-      retSet.add(Range.singleton(Strings.nullToEmpty(value)));
+      String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
+      if (valueEquivalent == null) {
+        // Case when SQL compatible null handling is enabled
+        // Range.singleton(null) is invalid, so use the fact that
+        // only null values are less than empty string.
+        retSet.add(Range.lessThan(""));
+      } else {
+        retSet.add(Range.singleton(valueEquivalent));
+      }
     }
     return retSet;
   }
@@ -269,8 +261,13 @@ public String toString()
       builder.append(")");
     }
 
-    builder.append(" IN (").append(Joiner.on(", ").join(values)).append(")");
-
+    builder.append(" IN (")
+           .append(
+               Joiner.on(", ").join(
+                   Iterables.transform(values, input -> StringUtils.nullToEmptyNonDruidDataString(input))
+               )
+           )
+           .append(")");
     return builder.toString();
   }
 
diff --git a/processing/src/main/java/io/druid/query/filter/LikeDimFilter.java b/processing/src/main/java/io/druid/query/filter/LikeDimFilter.java
index 6822c24aff6..430e6f665a7 100644
--- a/processing/src/main/java/io/druid/query/filter/LikeDimFilter.java
+++ b/processing/src/main/java/io/druid/query/filter/LikeDimFilter.java
@@ -23,11 +23,11 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import com.google.common.collect.RangeSet;
 import com.google.common.collect.Sets;
 import com.google.common.io.BaseEncoding;
 import com.google.common.primitives.Chars;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.segment.data.Indexed;
@@ -100,7 +100,7 @@ private LikeMatcher(
     )
     {
       this.suffixMatch = Preconditions.checkNotNull(suffixMatch, "suffixMatch");
-      this.prefix = Strings.nullToEmpty(prefix);
+      this.prefix = NullHandling.nullToEmptyIfNeeded(prefix);
       this.pattern = Preconditions.checkNotNull(pattern, "pattern");
     }
 
@@ -153,7 +153,8 @@ private static void addPatternCharacter(final StringBuilder patternBuilder, fina
 
     public boolean matches(@Nullable final String s)
     {
-      return pattern.matcher(Strings.nullToEmpty(s)).matches();
+      String val = NullHandling.nullToEmptyIfNeeded(s);
+      return val != null && pattern.matcher(val).matches();
     }
 
     /**
@@ -167,7 +168,7 @@ public boolean matchesSuffixOnly(final Indexed<String> strings, final int i)
         return true;
       } else if (suffixMatch == SuffixMatch.MATCH_EMPTY) {
         final String s = strings.get(i);
-        return (s == null ? 0 : s.length()) == prefix.length();
+        return s == null ? matches(null) : s.length() == prefix.length();
       } else {
         // suffixMatch is MATCH_PATTERN
         final String s = strings.get(i);
@@ -183,23 +184,9 @@ public DruidPredicateFactory predicateFactory(final ExtractionFn extractionFn)
         public Predicate<String> makeStringPredicate()
         {
           if (extractionFn != null) {
-            return new Predicate<String>()
-            {
-              @Override
-              public boolean apply(String input)
-              {
-                return matches(extractionFn.apply(input));
-              }
-            };
+            return input -> matches(extractionFn.apply(input));
           } else {
-            return new Predicate<String>()
-            {
-              @Override
-              public boolean apply(String input)
-              {
-                return matches(input);
-              }
-            };
+            return input -> matches(input);
           }
         }
 
diff --git a/processing/src/main/java/io/druid/query/filter/LongValueMatcherColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/filter/LongValueMatcherColumnSelectorStrategy.java
index c53d0e6dc78..70fe9d3a1c7 100644
--- a/processing/src/main/java/io/druid/query/filter/LongValueMatcherColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/filter/LongValueMatcherColumnSelectorStrategy.java
@@ -22,7 +22,6 @@
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
 import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.DimensionHandlerUtils;
-import io.druid.segment.filter.BooleanValueMatcher;
 
 public class LongValueMatcherColumnSelectorStrategy
     implements ValueMatcherColumnSelectorStrategy<BaseLongColumnValueSelector>
@@ -32,7 +31,7 @@ public ValueMatcher makeValueMatcher(final BaseLongColumnValueSelector selector,
   {
     final Long matchVal = DimensionHandlerUtils.convertObjectToLong(value);
     if (matchVal == null) {
-      return BooleanValueMatcher.of(false);
+      return ValueMatcher.nullValueMatcher(selector);
     }
     final long matchValLong = matchVal;
     return new ValueMatcher()
@@ -63,6 +62,9 @@ public ValueMatcher makeValueMatcher(
       @Override
       public boolean matches()
       {
+        if (selector.isNull()) {
+          return predicate.applyNull();
+        }
         return predicate.applyLong(selector.getLong());
       }
 
@@ -78,13 +80,11 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   @Override
   public ValueGetter makeValueGetter(final BaseLongColumnValueSelector selector)
   {
-    return new ValueGetter()
-    {
-      @Override
-      public String[] get()
-      {
-        return new String[]{Long.toString(selector.getLong())};
+    return () -> {
+      if (selector.isNull()) {
+        return null;
       }
+      return new String[]{Long.toString(selector.getLong())};
     };
   }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java b/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java
index 5dbf47531eb..5a71ecc4cd9 100644
--- a/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java
+++ b/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java
@@ -24,21 +24,22 @@
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
 import com.google.common.base.Predicates;
-import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
 import com.google.common.collect.Sets;
 import com.google.common.collect.TreeRangeSet;
 import com.google.common.primitives.Doubles;
 import com.google.common.primitives.Floats;
+import io.druid.common.config.NullHandling;
 import io.druid.common.guava.GuavaUtils;
 import io.druid.java.util.common.StringUtils;
+import io.druid.query.cache.CacheKeyBuilder;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.segment.filter.DimensionPredicateFilter;
 import io.druid.segment.filter.SelectorFilter;
 
-import java.nio.ByteBuffer;
+import javax.annotation.Nullable;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.Objects;
 
@@ -47,6 +48,8 @@
 public class SelectorDimFilter implements DimFilter
 {
   private final String dimension;
+
+  @Nullable
   private final String value;
   private final ExtractionFn extractionFn;
 
@@ -66,31 +69,28 @@ public SelectorDimFilter(
     Preconditions.checkArgument(dimension != null, "dimension must not be null");
 
     this.dimension = dimension;
-    this.value = Strings.nullToEmpty(value);
+    this.value = NullHandling.emptyToNullIfNeeded(value);
     this.extractionFn = extractionFn;
   }
 
   @Override
   public byte[] getCacheKey()
   {
-    byte[] dimensionBytes = StringUtils.toUtf8(dimension);
-    byte[] valueBytes = (value == null) ? new byte[]{} : StringUtils.toUtf8(value);
-    byte[] extractionFnBytes = extractionFn == null ? new byte[0] : extractionFn.getCacheKey();
-
-    return ByteBuffer.allocate(3 + dimensionBytes.length + valueBytes.length + extractionFnBytes.length)
-                     .put(DimFilterUtils.SELECTOR_CACHE_ID)
-                     .put(dimensionBytes)
-                     .put(DimFilterUtils.STRING_SEPARATOR)
-                     .put(valueBytes)
-                     .put(DimFilterUtils.STRING_SEPARATOR)
-                     .put(extractionFnBytes)
-                     .array();
+    return new CacheKeyBuilder(DimFilterUtils.SELECTOR_CACHE_ID)
+        .appendByte(DimFilterUtils.STRING_SEPARATOR)
+        .appendString(dimension)
+        .appendByte(DimFilterUtils.STRING_SEPARATOR)
+        .appendByte(value == null ? NullHandling.IS_NULL_BYTE : NullHandling.IS_NOT_NULL_BYTE)
+        .appendString(value)
+        .appendByte(DimFilterUtils.STRING_SEPARATOR)
+        .appendByteArray(extractionFn == null ? new byte[0] : extractionFn.getCacheKey())
+        .build();
   }
 
   @Override
   public DimFilter optimize()
   {
-    return new InDimFilter(dimension, ImmutableList.of(value), extractionFn).optimize();
+    return new InDimFilter(dimension, Collections.singletonList(value), extractionFn).optimize();
   }
 
   @Override
@@ -99,14 +99,13 @@ public Filter toFilter()
     if (extractionFn == null) {
       return new SelectorFilter(dimension, value);
     } else {
-      final String valueOrNull = Strings.emptyToNull(value);
 
       final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
       {
         @Override
         public Predicate<String> makeStringPredicate()
         {
-          return Predicates.equalTo(valueOrNull);
+          return Predicates.equalTo(value);
         }
 
         @Override
@@ -190,7 +189,14 @@ public boolean equals(Object o)
       return null;
     }
     RangeSet<String> retSet = TreeRangeSet.create();
-    retSet.add(Range.singleton(Strings.nullToEmpty(value)));
+    String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
+    if (valueEquivalent == null) {
+      // Case when SQL compatible null handling is enabled
+      // Nulls are less than empty String in segments
+      retSet.add(Range.lessThan(""));
+    } else {
+      retSet.add(Range.singleton(valueEquivalent));
+    }
     return retSet;
   }
 
@@ -219,6 +225,10 @@ private void initLongPredicate()
       if (longPredicate != null) {
         return;
       }
+      if (value == null) {
+        longPredicate = DruidLongPredicate.MATCH_NULL_ONLY;
+        return;
+      }
       final Long valueAsLong = GuavaUtils.tryParseLong(value);
       if (valueAsLong == null) {
         longPredicate = DruidLongPredicate.ALWAYS_FALSE;
@@ -239,6 +249,11 @@ private void initFloatPredicate()
       if (floatPredicate != null) {
         return;
       }
+
+      if (value == null) {
+        floatPredicate = DruidFloatPredicate.MATCH_NULL_ONLY;
+        return;
+      }
       final Float valueAsFloat = Floats.tryParse(value);
 
       if (valueAsFloat == null) {
@@ -259,6 +274,10 @@ private void initDoublePredicate()
       if (druidDoublePredicate != null) {
         return;
       }
+      if (value == null) {
+        druidDoublePredicate = DruidDoublePredicate.MATCH_NULL_ONLY;
+        return;
+      }
       final Double aDouble = Doubles.tryParse(value);
 
       if (aDouble == null) {
diff --git a/processing/src/main/java/io/druid/query/filter/StringValueMatcherColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/filter/StringValueMatcherColumnSelectorStrategy.java
index 341ccff53e9..a30303ad8b8 100644
--- a/processing/src/main/java/io/druid/query/filter/StringValueMatcherColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/filter/StringValueMatcherColumnSelectorStrategy.java
@@ -20,7 +20,6 @@
 package io.druid.query.filter;
 
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import io.druid.segment.DimensionSelector;
 import io.druid.segment.data.IndexedInts;
 import io.druid.segment.filter.BooleanValueMatcher;
@@ -28,19 +27,11 @@
 public class StringValueMatcherColumnSelectorStrategy implements ValueMatcherColumnSelectorStrategy<DimensionSelector>
 {
   private static final String[] NULL_VALUE = new String[]{null};
-  private static final ValueGetter NULL_VALUE_GETTER = new ValueGetter()
-  {
-    @Override
-    public String[] get()
-    {
-      return NULL_VALUE;
-    }
-  };
+  private static final ValueGetter NULL_VALUE_GETTER = () -> NULL_VALUE;
 
   @Override
   public ValueMatcher makeValueMatcher(final DimensionSelector selector, String value)
   {
-    value = Strings.emptyToNull(value);
     if (selector.getValueCardinality() == 0) {
       return BooleanValueMatcher.of(value == null);
     } else {
@@ -68,22 +59,17 @@ public ValueGetter makeValueGetter(final DimensionSelector selector)
     if (selector.getValueCardinality() == 0) {
       return NULL_VALUE_GETTER;
     } else {
-      return new ValueGetter()
-      {
-        @Override
-        public String[] get()
-        {
-          final IndexedInts row = selector.getRow();
-          final int size = row.size();
-          if (size == 0) {
-            return NULL_VALUE;
-          } else {
-            String[] values = new String[size];
-            for (int i = 0; i < size; ++i) {
-              values[i] = Strings.emptyToNull(selector.lookupName(row.get(i)));
-            }
-            return values;
+      return () -> {
+        final IndexedInts row = selector.getRow();
+        final int size = row.size();
+        if (size == 0) {
+          return NULL_VALUE;
+        } else {
+          String[] values = new String[size];
+          for (int i = 0; i < size; ++i) {
+            values[i] = selector.lookupName(row.get(i));
           }
+          return values;
         }
       };
     }
diff --git a/processing/src/main/java/io/druid/query/filter/ValueGetter.java b/processing/src/main/java/io/druid/query/filter/ValueGetter.java
index 7948765fc0e..ce4529c0d3d 100644
--- a/processing/src/main/java/io/druid/query/filter/ValueGetter.java
+++ b/processing/src/main/java/io/druid/query/filter/ValueGetter.java
@@ -19,13 +19,19 @@
 
 package io.druid.query.filter;
 
+import javax.annotation.Nullable;
+
 /**
  */
 public interface ValueGetter
 {
-  // It is not ideal that Long and Float values will get
-  // converted to strings. We should also add functions
-  // for these and modify ColumnComparisonFilter to handle
-  // comparing Long and Float columns to eachother.
+  /**
+   * It is not ideal that Long and Float values will get
+   * converted to strings. We should also add functions
+   * for these and modify ColumnComparisonFilter to handle
+   * comparing Long and Float columns to each other.
+   * Returns null when the underlying Long/Float value is null.
+   */
+  @Nullable
   String[] get();
 }
diff --git a/processing/src/main/java/io/druid/query/filter/ValueMatcher.java b/processing/src/main/java/io/druid/query/filter/ValueMatcher.java
index 2bda995fc93..ae01dd352a3 100644
--- a/processing/src/main/java/io/druid/query/filter/ValueMatcher.java
+++ b/processing/src/main/java/io/druid/query/filter/ValueMatcher.java
@@ -21,6 +21,8 @@
 
 import io.druid.query.monomorphicprocessing.CalledFromHotLoop;
 import io.druid.query.monomorphicprocessing.HotLoopCallee;
+import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
+import io.druid.segment.BaseNullableColumnValueSelector;
 
 /**
  */
@@ -28,4 +30,23 @@
 {
   @CalledFromHotLoop
   boolean matches();
+
+  // Utility method to match null values.
+  static ValueMatcher nullValueMatcher(BaseNullableColumnValueSelector selector)
+  {
+    return new ValueMatcher()
+    {
+      @Override
+      public boolean matches()
+      {
+        return selector.isNull();
+      }
+
+      @Override
+      public void inspectRuntimeShape(RuntimeShapeInspector inspector)
+      {
+        inspector.visit("selector", selector);
+      }
+    };
+  }
 }
diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
index 89925e7e12e..e96b5427d48 100644
--- a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
+++ b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
@@ -58,6 +58,7 @@
 import io.druid.query.ordering.StringComparators;
 import io.druid.query.spec.LegacySegmentSpec;
 import io.druid.query.spec.QuerySegmentSpec;
+import io.druid.segment.DimensionHandlerUtils;
 import io.druid.segment.VirtualColumn;
 import io.druid.segment.VirtualColumns;
 import io.druid.segment.column.Column;
@@ -533,19 +534,19 @@ private static int compareDims(List<DimensionSpec> dimensions, Row lhs, Row rhs)
     for (DimensionSpec dimension : dimensions) {
       final int dimCompare;
       if (dimension.getOutputType() == ValueType.LONG) {
-        dimCompare = Long.compare(
-            ((Number) lhs.getRaw(dimension.getOutputName())).longValue(),
-            ((Number) rhs.getRaw(dimension.getOutputName())).longValue()
+        dimCompare = Comparators.<Long>naturalNullsFirst().compare(
+            DimensionHandlerUtils.convertObjectToLong(lhs.getRaw(dimension.getOutputName())),
+            DimensionHandlerUtils.convertObjectToLong(rhs.getRaw(dimension.getOutputName()))
         );
       } else if (dimension.getOutputType() == ValueType.FLOAT) {
-        dimCompare = Float.compare(
-            ((Number) lhs.getRaw(dimension.getOutputName())).floatValue(),
-            ((Number) rhs.getRaw(dimension.getOutputName())).floatValue()
+        dimCompare = Comparators.<Float>naturalNullsFirst().compare(
+            DimensionHandlerUtils.convertObjectToFloat(lhs.getRaw(dimension.getOutputName())),
+            DimensionHandlerUtils.convertObjectToFloat(rhs.getRaw(dimension.getOutputName()))
         );
       } else if (dimension.getOutputType() == ValueType.DOUBLE) {
-        dimCompare = Double.compare(
-            ((Number) lhs.getRaw(dimension.getOutputName())).doubleValue(),
-            ((Number) rhs.getRaw(dimension.getOutputName())).doubleValue()
+        dimCompare = Comparators.<Double>naturalNullsFirst().compare(
+            DimensionHandlerUtils.convertObjectToDouble(lhs.getRaw(dimension.getOutputName())),
+            DimensionHandlerUtils.convertObjectToDouble(rhs.getRaw(dimension.getOutputName()))
         );
       } else {
         dimCompare = ((Ordering) Comparators.naturalNullsFirst()).compare(
diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java
index 1d77d08e2cd..be47b5eb6c9 100644
--- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java
+++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java
@@ -347,7 +347,7 @@ public RowIterator(GroupByQuery query, final Cursor cursor, ByteBuffer metricsBu
         AggregatorFactory aggregatorSpec = aggregatorSpecs.get(i);
         aggregators[i] = aggregatorSpec.factorizeBuffered(cursor.getColumnSelectorFactory());
         metricNames[i] = aggregatorSpec.getName();
-        sizesRequired[i] = aggregatorSpec.getMaxIntermediateSize();
+        sizesRequired[i] = aggregatorSpec.getMaxIntermediateSizeWithNulls();
       }
     }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouper.java
index 6546b136199..c006a45632f 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouper.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouper.java
@@ -68,7 +68,7 @@ static long requiredBufferCapacity(
   {
     final int cardinalityWithMissingValue = cardinality + 1;
     final int recordSize = Arrays.stream(aggregatorFactories)
-                                 .mapToInt(AggregatorFactory::getMaxIntermediateSize)
+                                 .mapToInt(AggregatorFactory::getMaxIntermediateSizeWithNulls)
                                  .sum();
 
     return getUsedFlagBufferCapacity(cardinalityWithMissingValue) +  // total used flags size
@@ -103,7 +103,7 @@ public BufferArrayGrouper(
     for (int i = 0; i < aggregatorFactories.length; i++) {
       aggregators[i] = aggregatorFactories[i].factorizeBuffered(columnSelectorFactory);
       aggregatorOffsets[i] = offset;
-      offset += aggregatorFactories[i].getMaxIntermediateSize();
+      offset += aggregatorFactories[i].getMaxIntermediateSizeWithNulls();
     }
     recordSize = offset;
   }
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java
index 9a29d8f8fb4..ddbba4f0815 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java
@@ -83,7 +83,7 @@ public BufferHashGrouper(
     for (int i = 0; i < aggregatorFactories.length; i++) {
       aggregators[i] = aggregatorFactories[i].factorizeBuffered(columnSelectorFactory);
       aggregatorOffsets[i] = offset;
-      offset += aggregatorFactories[i].getMaxIntermediateSize();
+      offset += aggregatorFactories[i].getMaxIntermediateSizeWithNulls();
     }
 
     this.bucketSize = offset;
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java
index ab205705cf7..848086d0bdb 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java
@@ -20,12 +20,12 @@
 package io.druid.query.groupby.epinephelinae;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Maps;
 import io.druid.collections.NonBlockingPool;
 import io.druid.collections.ResourceHolder;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.data.input.Row;
 import io.druid.java.util.common.DateTimes;
@@ -45,6 +45,7 @@
 import io.druid.query.groupby.epinephelinae.column.GroupByColumnSelectorPlus;
 import io.druid.query.groupby.epinephelinae.column.GroupByColumnSelectorStrategy;
 import io.druid.query.groupby.epinephelinae.column.LongGroupByColumnSelectorStrategy;
+import io.druid.query.groupby.epinephelinae.column.NullableValueGroupByColumnSelectorStrategy;
 import io.druid.query.groupby.epinephelinae.column.StringGroupByColumnSelectorStrategy;
 import io.druid.query.groupby.strategy.GroupByStrategyV2;
 import io.druid.segment.ColumnValueSelector;
@@ -124,8 +125,8 @@ private GroupByQueryEngineV2()
 
     final ResourceHolder<ByteBuffer> bufferHolder = intermediateResultsBufferPool.take();
 
-    final String fudgeTimestampString = Strings.emptyToNull(
-        query.getContextValue(GroupByStrategyV2.CTX_KEY_FUDGE_TIMESTAMP, "")
+    final String fudgeTimestampString = NullHandling.emptyToNullIfNeeded(
+        query.getContextValue(GroupByStrategyV2.CTX_KEY_FUDGE_TIMESTAMP, null)
     );
 
     final DateTime fudgeTimestamp = fudgeTimestampString == null
@@ -248,15 +249,24 @@ public GroupByColumnSelectorStrategy makeColumnSelectorStrategy(
             return new DictionaryBuildingStringGroupByColumnSelectorStrategy();
           }
         case LONG:
-          return new LongGroupByColumnSelectorStrategy();
+          return makeNullableStrategy(new LongGroupByColumnSelectorStrategy());
         case FLOAT:
-          return new FloatGroupByColumnSelectorStrategy();
+          return makeNullableStrategy(new FloatGroupByColumnSelectorStrategy());
         case DOUBLE:
-          return new DoubleGroupByColumnSelectorStrategy();
+          return makeNullableStrategy(new DoubleGroupByColumnSelectorStrategy());
         default:
           throw new IAE("Cannot create query type helper from invalid type [%s]", type);
       }
     }
+
+    private GroupByColumnSelectorStrategy makeNullableStrategy(GroupByColumnSelectorStrategy delegate)
+    {
+      if (NullHandling.sqlCompatible()) {
+        return new NullableValueGroupByColumnSelectorStrategy(delegate);
+      } else {
+        return delegate;
+      }
+    }
   }
 
   private abstract static class GroupByEngineIterator<KeyType> implements Iterator<Row>, Closeable
@@ -544,7 +554,8 @@ protected void putToMap(ByteBuffer key, Map<String, Object> map)
         selectorPlus.getColumnSelectorStrategy().processValueFromGroupingKey(
             selectorPlus,
             key,
-            map
+            map,
+            selectorPlus.getKeyBufferPosition()
         );
       }
     }
@@ -684,19 +695,16 @@ private static void convertRowTypesToOutputTypes(List<DimensionSpec> dimensionSp
           (dimName, baseVal) -> {
             switch (outputType) {
               case STRING:
-                baseVal = baseVal == null ? "" : baseVal.toString();
+                baseVal = DimensionHandlerUtils.convertObjectToString(baseVal);
                 break;
               case LONG:
                 baseVal = DimensionHandlerUtils.convertObjectToLong(baseVal);
-                baseVal = baseVal == null ? 0L : baseVal;
                 break;
               case FLOAT:
                 baseVal = DimensionHandlerUtils.convertObjectToFloat(baseVal);
-                baseVal = baseVal == null ? 0.f : baseVal;
                 break;
               case DOUBLE:
                 baseVal = DimensionHandlerUtils.convertObjectToDouble(baseVal);
-                baseVal = baseVal == null ? 0.d : baseVal;
                 break;
               default:
                 throw new IAE("Unsupported type: " + outputType);
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java
index aaa1fcf7fd4..d8cfe3a0d47 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java
@@ -90,7 +90,7 @@ public LimitedBufferHashGrouper(
     for (int i = 0; i < aggregatorFactories.length; i++) {
       aggregators[i] = aggregatorFactories[i].factorizeBuffered(columnSelectorFactory);
       aggregatorOffsets[i] = offset;
-      offset += aggregatorFactories[i].getMaxIntermediateSize();
+      offset += aggregatorFactories[i].getMaxIntermediateSizeWithNulls();
     }
 
     // For each bucket, store an extra field indicating the bucket's current index within the heap when
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
index fb04dbe1e02..4b15af2e725 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.annotation.JsonValue;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -31,6 +30,7 @@
 import com.google.common.primitives.Longs;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import io.druid.collections.ReferenceCountingResourceHolder;
+import io.druid.common.config.NullHandling;
 import io.druid.common.utils.IntArrayUtils;
 import io.druid.data.input.MapBasedRow;
 import io.druid.data.input.Row;
@@ -39,6 +39,7 @@
 import io.druid.java.util.common.Pair;
 import io.druid.java.util.common.granularity.AllGranularity;
 import io.druid.java.util.common.guava.Accumulator;
+import io.druid.java.util.common.guava.Comparators;
 import io.druid.query.BaseQuery;
 import io.druid.query.ColumnSelectorPlus;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -438,7 +439,7 @@ public Row apply(Grouper.Entry<RowBasedKey> entry)
               Object dimVal = entry.getKey().getKey()[i];
               theMap.put(
                   query.getDimensions().get(i - dimStart).getOutputName(),
-                  dimVal instanceof String ? Strings.emptyToNull((String) dimVal) : dimVal
+                  dimVal instanceof String ? NullHandling.emptyToNullIfNeeded((String) dimVal) : dimVal
               );
             }
 
@@ -527,18 +528,11 @@ public String toString()
     @Override
     public Supplier<Comparable> makeInputRawSupplier(DimensionSelector selector)
     {
-      return new Supplier<Comparable>()
-      {
-        @Override
-        public Comparable get()
-        {
-          final String value;
-          IndexedInts index = selector.getRow();
-          value = index.size() == 0
-                  ? ""
-                  : selector.lookupName(index.get(0));
-          return Strings.nullToEmpty(value);
-        }
+      return () -> {
+        IndexedInts index = selector.getRow();
+        return index.size() == 0
+               ? null
+               : selector.lookupName(index.get(0));
       };
     }
   }
@@ -605,28 +599,19 @@ public InputRawSupplierColumnSelectorStrategy makeColumnSelectorStrategy(
       type = type == null ? ValueType.STRING : type;
       switch (type) {
         case STRING:
-          functions[i] = input -> input == null ? "" : input.toString();
+          functions[i] = input -> DimensionHandlerUtils.convertObjectToString(input);
           break;
 
         case LONG:
-          functions[i] = input -> {
-            final Long val = DimensionHandlerUtils.convertObjectToLong(input);
-            return val == null ? 0L : val;
-          };
+          functions[i] = input -> DimensionHandlerUtils.convertObjectToLong(input);
           break;
 
         case FLOAT:
-          functions[i] = input -> {
-            final Float val = DimensionHandlerUtils.convertObjectToFloat(input);
-            return val == null ? 0.f : val;
-          };
+          functions[i] = input -> DimensionHandlerUtils.convertObjectToFloat(input);
           break;
 
         case DOUBLE:
-          functions[i] = input -> {
-            Double val = DimensionHandlerUtils.convertObjectToDouble(input);
-            return val == null ? 0.0 : val;
-          };
+          functions[i] = input -> DimensionHandlerUtils.convertObjectToDouble(input);
           break;
         default:
           throw new IAE("invalid type: [%s]", type);
@@ -871,7 +856,10 @@ public int compare(Grouper.Entry<RowBasedKey> entry1, Grouper.Entry<RowBasedKey>
     private static int compareDimsInRows(RowBasedKey key1, RowBasedKey key2, int dimStart)
     {
       for (int i = dimStart; i < key1.getKey().length; i++) {
-        final int cmp = ((Comparable) key1.getKey()[i]).compareTo(key2.getKey()[i]);
+        final int cmp = Comparators.<Comparable>naturalNullsFirst().compare(
+            (Comparable) key1.getKey()[i],
+            (Comparable) key2.getKey()[i]
+        );
         if (cmp != 0) {
           return cmp;
         }
@@ -920,9 +908,10 @@ private static int compareDimsInRowsWithAggs(
 
         if (isNumericField.get(i) && comparator.equals(StringComparators.NUMERIC)) {
           // use natural comparison
-          cmp = lhs.compareTo(rhs);
+          cmp = Comparators.<Comparable>naturalNullsFirst().compare(lhs, rhs);
         } else {
-          cmp = comparator.compare(lhs.toString(), rhs.toString());
+          cmp = comparator.compare(DimensionHandlerUtils.convertObjectToString(lhs),
+                                   DimensionHandlerUtils.convertObjectToString(rhs));
         }
 
         if (cmp != 0) {
@@ -934,9 +923,10 @@ private static int compareDimsInRowsWithAggs(
     }
   }
 
-  static long estimateStringKeySize(String key)
+  static long estimateStringKeySize(@Nullable String key)
   {
-    return (long) key.length() * Character.BYTES + ROUGH_OVERHEAD_PER_DICTIONARY_ENTRY;
+    long length = key == null ? 0 : key.length();
+    return length * Character.BYTES + ROUGH_OVERHEAD_PER_DICTIONARY_ENTRY;
   }
 
   private static class RowBasedKeySerde implements Grouper.KeySerde<RowBasedGrouperHelper.RowBasedKey>
@@ -1024,7 +1014,7 @@ private void initializeRankOfDictionaryIds()
       rankOfDictionaryIds = IntStream.range(0, dictionarySize).toArray();
       IntArrays.quickSort(
           rankOfDictionaryIds,
-          (i1, i2) -> dictionary.get(i1).compareTo(dictionary.get(i2))
+          (i1, i2) -> Comparators.<String>naturalNullsFirst().compare(dictionary.get(i1), dictionary.get(i2))
       );
 
       IntArrayUtils.inverse(rankOfDictionaryIds);
@@ -1208,7 +1198,7 @@ public int compare(ByteBuffer lhsBuffer, ByteBuffer rhsBuffer, int lhsPosition,
               throw new IAE("Cannot order by a non-numeric aggregator[%s]", orderSpec);
             }
 
-            serdeHelper = makeNumericSerdeHelper(valueType, aggOffset, true, stringComparator);
+            serdeHelper = makeNullHandlingNumericserdeHelper(valueType, aggOffset, true, stringComparator);
 
             orderByHelpers.add(serdeHelper);
             needsReverses.add(needsReverse);
@@ -1393,12 +1383,34 @@ private RowBasedKeySerdeHelper makeSerdeHelper(
         case LONG:
         case FLOAT:
         case DOUBLE:
-          return makeNumericSerdeHelper(valueType, keyBufferPosition, pushLimitDown, stringComparator);
+          return makeNullHandlingNumericserdeHelper(valueType, keyBufferPosition, pushLimitDown, stringComparator);
         default:
           throw new IAE("invalid type: %s", valueType);
       }
     }
 
+    private RowBasedKeySerdeHelper makeNullHandlingNumericserdeHelper(
+        ValueType valueType,
+        int keyBufferPosition,
+        boolean pushLimitDown,
+        @Nullable StringComparator stringComparator
+    )
+    {
+      if (NullHandling.sqlCompatible()) {
+        return new NullableRowBasedKeySerdeHelper(
+            makeNumericSerdeHelper(
+                valueType,
+                keyBufferPosition + Byte.BYTES,
+                pushLimitDown,
+                stringComparator
+            ),
+            keyBufferPosition
+        );
+      } else {
+        return makeNumericSerdeHelper(valueType, keyBufferPosition, pushLimitDown, stringComparator);
+      }
+    }
+
     private RowBasedKeySerdeHelper makeNumericSerdeHelper(
         ValueType valueType,
         int keyBufferPosition,
@@ -1581,7 +1593,7 @@ public int getKeyBufferValueSize()
       @Override
       public boolean putToKeyBuffer(RowBasedKey key, int idx)
       {
-        keyBuffer.putLong((Long) key.getKey()[idx]);
+        keyBuffer.putLong(DimensionHandlerUtils.nullToZero((Long) key.getKey()[idx]));
         return true;
       }
 
@@ -1632,7 +1644,7 @@ public int getKeyBufferValueSize()
       @Override
       public boolean putToKeyBuffer(RowBasedKey key, int idx)
       {
-        keyBuffer.putFloat((Float) key.getKey()[idx]);
+        keyBuffer.putFloat(DimensionHandlerUtils.nullToZero((Float) key.getKey()[idx]));
         return true;
       }
 
@@ -1684,7 +1696,7 @@ public int getKeyBufferValueSize()
       @Override
       public boolean putToKeyBuffer(RowBasedKey key, int idx)
       {
-        keyBuffer.putDouble((Double) key.getKey()[idx]);
+        keyBuffer.putDouble(DimensionHandlerUtils.nullToZero((Double) key.getKey()[idx]));
         return true;
       }
 
@@ -1700,6 +1712,81 @@ public BufferComparator getBufferComparator()
         return bufferComparator;
       }
     }
+
+    // This class is only used when SQL compatible null handling is enabled.
+    // When serializing the key, it will add a byte to store the nullability of the serialized object before
+    // serializing the key using delegate RowBasedKeySerdeHelper.
+    // Buffer Layout - 1 byte for storing nullability + bytes from delegate RowBasedKeySerdeHelper.
+    private class NullableRowBasedKeySerdeHelper implements RowBasedKeySerdeHelper
+    {
+      private final RowBasedKeySerdeHelper delegate;
+      private final int keyBufferPosition;
+      private final BufferComparator comparator;
+
+      NullableRowBasedKeySerdeHelper(RowBasedKeySerdeHelper delegate, int keyBufferPosition)
+      {
+        this.delegate = delegate;
+        this.keyBufferPosition = keyBufferPosition;
+        BufferComparator delegateBufferComparator = this.delegate.getBufferComparator();
+        this.comparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> {
+          boolean isLhsNull = (lhsBuffer.get(lhsPosition + keyBufferPosition) == NullHandling.IS_NULL_BYTE);
+          boolean isRhsNull = (rhsBuffer.get(rhsPosition + keyBufferPosition) == NullHandling.IS_NULL_BYTE);
+          if (isLhsNull && isRhsNull) {
+            // Both are null
+            return 0;
+          }
+          // only lhs is null
+          if (isLhsNull) {
+            return -1;
+          }
+          // only rhs is null
+          if (isRhsNull) {
+            return 1;
+          }
+          return delegateBufferComparator.compare(
+              lhsBuffer,
+              rhsBuffer,
+              lhsPosition,
+              rhsPosition
+          );
+        };
+      }
+
+      @Override
+      public int getKeyBufferValueSize()
+      {
+        return delegate.getKeyBufferValueSize() + Byte.BYTES;
+      }
+
+      @Override
+      public boolean putToKeyBuffer(RowBasedKey key, int idx)
+      {
+        Object val = key.getKey()[idx];
+        if (val == null) {
+          keyBuffer.put(NullHandling.IS_NULL_BYTE);
+        } else {
+          keyBuffer.put(NullHandling.IS_NOT_NULL_BYTE);
+        }
+        delegate.putToKeyBuffer(key, idx);
+        return true;
+      }
+
+      @Override
+      public void getFromByteBuffer(ByteBuffer buffer, int initialOffset, int dimValIdx, Comparable[] dimValues)
+      {
+        if (buffer.get(initialOffset + keyBufferPosition) == NullHandling.IS_NULL_BYTE) {
+          dimValues[dimValIdx] = null;
+        } else {
+          delegate.getFromByteBuffer(buffer, initialOffset, dimValIdx, dimValues);
+        }
+      }
+
+      @Override
+      public BufferComparator getBufferComparator()
+      {
+        return comparator;
+      }
+    }
   }
 
   private static int compareDimsInBuffersForNullFudgeTimestamp(
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouper.java
index 99da300a522..55e5b83fe5e 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouper.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouper.java
@@ -121,7 +121,7 @@
   {
     int recordSize = keySerde.keySize();
     for (AggregatorFactory aggregatorFactory : aggregatorFactories) {
-      recordSize += aggregatorFactory.getMaxIntermediateSize();
+      recordSize += aggregatorFactory.getMaxIntermediateSizeWithNulls();
     }
     return recordSize * 3;
   }
@@ -144,7 +144,7 @@
     for (int i = 0; i < aggregatorFactories.length; i++) {
       aggregators[i] = aggregatorFactories[i].factorizeBuffered(columnSelectorFactory);
       aggregatorOffsets[i] = offset;
-      offset += aggregatorFactories[i].getMaxIntermediateSize();
+      offset += aggregatorFactories[i].getMaxIntermediateSizeWithNulls();
     }
     this.recordSize = offset;
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
index bfd81ce7f8a..157526ee67f 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
@@ -21,6 +21,7 @@
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.DimensionSelector;
 import io.druid.segment.data.ArrayBasedIndexedInts;
@@ -47,9 +48,14 @@
   }
 
   @Override
-  public void processValueFromGroupingKey(GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap)
+  public void processValueFromGroupingKey(
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
+  )
   {
-    final int id = key.getInt(selectorPlus.getKeyBufferPosition());
+    final int id = key.getInt(keyBufferPosition);
 
     // GROUP_BY_MISSING_VALUE is used to indicate empty rows, which are omitted from the result map.
     if (id != GROUP_BY_MISSING_VALUE) {
@@ -59,7 +65,7 @@ public void processValueFromGroupingKey(GroupByColumnSelectorPlus selectorPlus,
           value
       );
     } else {
-      resultMap.put(selectorPlus.getOutputName(), "");
+      resultMap.put(selectorPlus.getOutputName(), NullHandling.defaultStringValue());
     }
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DoubleGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DoubleGroupByColumnSelectorStrategy.java
index 0dff6cf5946..8906f324c3e 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DoubleGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DoubleGroupByColumnSelectorStrategy.java
@@ -21,7 +21,9 @@
 
 
 import io.druid.segment.ColumnValueSelector;
+import io.druid.segment.DimensionHandlerUtils;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
@@ -35,10 +37,13 @@ public int getGroupingKeySize()
 
   @Override
   public void processValueFromGroupingKey(
-      GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
   )
   {
-    final double val = key.getDouble(selectorPlus.getKeyBufferPosition());
+    final double val = key.getDouble(keyBufferPosition);
     resultMap.put(selectorPlus.getOutputName(), val);
   }
 
@@ -55,17 +60,21 @@ public Object getOnlyValue(ColumnValueSelector selector)
   }
 
   @Override
-  public void writeToKeyBuffer(int keyBufferPosition, Object obj, ByteBuffer keyBuffer)
+  public void writeToKeyBuffer(int keyBufferPosition, @Nullable Object obj, ByteBuffer keyBuffer)
   {
-    keyBuffer.putDouble(keyBufferPosition, (Double) obj);
+    keyBuffer.putDouble(keyBufferPosition, DimensionHandlerUtils.nullToZero((Double) obj));
   }
 
   @Override
   public void initGroupingKeyColumnValue(
-      int keyBufferPosition, int columnIndex, Object rowObj, ByteBuffer keyBuffer, int[] stack
+      int keyBufferPosition,
+      int columnIndex,
+      Object rowObj,
+      ByteBuffer keyBuffer,
+      int[] stack
   )
   {
-    keyBuffer.putDouble(keyBufferPosition, (Double) rowObj);
+    writeToKeyBuffer(keyBufferPosition, rowObj, keyBuffer);
     stack[columnIndex] = 1;
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/FloatGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/FloatGroupByColumnSelectorStrategy.java
index 8c5dce4a561..c0fc9e857d2 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/FloatGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/FloatGroupByColumnSelectorStrategy.java
@@ -20,7 +20,9 @@
 package io.druid.query.groupby.epinephelinae.column;
 
 import io.druid.segment.ColumnValueSelector;
+import io.druid.segment.DimensionHandlerUtils;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
@@ -35,10 +37,13 @@ public int getGroupingKeySize()
 
   @Override
   public void processValueFromGroupingKey(
-      GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
   )
   {
-    final float val = key.getFloat(selectorPlus.getKeyBufferPosition());
+    final float val = key.getFloat(keyBufferPosition);
     resultMap.put(selectorPlus.getOutputName(), val);
   }
 
@@ -55,17 +60,21 @@ public Object getOnlyValue(ColumnValueSelector selector)
   }
 
   @Override
-  public void writeToKeyBuffer(int keyBufferPosition, Object obj, ByteBuffer keyBuffer)
+  public void writeToKeyBuffer(int keyBufferPosition, @Nullable Object obj, ByteBuffer keyBuffer)
   {
-    keyBuffer.putFloat(keyBufferPosition, (Float) obj);
+    keyBuffer.putFloat(keyBufferPosition, DimensionHandlerUtils.nullToZero((Float) obj));
   }
 
   @Override
   public void initGroupingKeyColumnValue(
-      int keyBufferPosition, int columnIndex, Object rowObj, ByteBuffer keyBuffer, int[] stack
+      int keyBufferPosition,
+      int columnIndex,
+      Object rowObj,
+      ByteBuffer keyBuffer,
+      int[] stack
   )
   {
-    keyBuffer.putFloat(keyBufferPosition, (Float) rowObj);
+    writeToKeyBuffer(keyBufferPosition, rowObj, keyBuffer);
     stack[columnIndex] = 1;
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/GroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/GroupByColumnSelectorStrategy.java
index d0beade7b75..bb4586a6e2b 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/GroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/GroupByColumnSelectorStrategy.java
@@ -59,11 +59,13 @@
    * @param selectorPlus dimension info containing the key offset, value selector, and dimension spec
    * @param resultMap result map for the group by query being served
    * @param key grouping key
+   * @param keyBufferPosition the buffer position of this column's grouping key; passed explicitly to support chaining multiple {@link ColumnSelectorStrategy} instances.
    */
   void processValueFromGroupingKey(
       GroupByColumnSelectorPlus selectorPlus,
       ByteBuffer key,
-      Map<String, Object> resultMap
+      Map<String, Object> resultMap,
+      int keyBufferPosition
   );
 
   /**
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/LongGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/LongGroupByColumnSelectorStrategy.java
index 6c08d545314..a8c2bb46bd8 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/LongGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/LongGroupByColumnSelectorStrategy.java
@@ -20,7 +20,9 @@
 package io.druid.query.groupby.epinephelinae.column;
 
 import io.druid.segment.ColumnValueSelector;
+import io.druid.segment.DimensionHandlerUtils;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
@@ -35,10 +37,13 @@ public int getGroupingKeySize()
 
   @Override
   public void processValueFromGroupingKey(
-      GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
   )
   {
-    final long val = key.getLong(selectorPlus.getKeyBufferPosition());
+    final long val = key.getLong(keyBufferPosition);
     resultMap.put(selectorPlus.getOutputName(), val);
   }
 
@@ -55,17 +60,21 @@ public Object getOnlyValue(ColumnValueSelector selector)
   }
 
   @Override
-  public void writeToKeyBuffer(int keyBufferPosition, Object obj, ByteBuffer keyBuffer)
+  public void writeToKeyBuffer(int keyBufferPosition, @Nullable Object obj, ByteBuffer keyBuffer)
   {
-    keyBuffer.putLong(keyBufferPosition, (Long) obj);
+    keyBuffer.putLong(keyBufferPosition, DimensionHandlerUtils.nullToZero((Long) obj));
   }
 
   @Override
   public void initGroupingKeyColumnValue(
-      int keyBufferPosition, int columnIndex, Object rowObj, ByteBuffer keyBuffer, int[] stack
+      int keyBufferPosition,
+      int columnIndex,
+      Object rowObj,
+      ByteBuffer keyBuffer,
+      int[] stack
   )
   {
-    keyBuffer.putLong(keyBufferPosition, (Long) rowObj);
+    writeToKeyBuffer(keyBufferPosition, rowObj, keyBuffer);
     stack[columnIndex] = 1;
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/NullableValueGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/NullableValueGroupByColumnSelectorStrategy.java
new file mode 100644
index 00000000000..842da6c4d73
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/NullableValueGroupByColumnSelectorStrategy.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.groupby.epinephelinae.column;
+
+
+import io.druid.common.config.NullHandling;
+import io.druid.segment.ColumnValueSelector;
+
+import javax.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.util.Map;
+
+public class NullableValueGroupByColumnSelectorStrategy implements GroupByColumnSelectorStrategy
+{
+  private final GroupByColumnSelectorStrategy delegate;
+
+  public NullableValueGroupByColumnSelectorStrategy(GroupByColumnSelectorStrategy delegate)
+  {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public int getGroupingKeySize()
+  {
+    return delegate.getGroupingKeySize() + Byte.BYTES;
+  }
+
+  @Override
+  public void processValueFromGroupingKey(
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
+  )
+  {
+    if (key.get(keyBufferPosition) == NullHandling.IS_NULL_BYTE) {
+      resultMap.put(selectorPlus.getOutputName(), null);
+    } else {
+      delegate.processValueFromGroupingKey(selectorPlus, key, resultMap, keyBufferPosition + Byte.BYTES);
+    }
+  }
+
+  @Override
+  public void initColumnValues(ColumnValueSelector selector, int columnIndex, Object[] values)
+  {
+    if (selector.isNull()) {
+      values[columnIndex] = null;
+    } else {
+      delegate.initColumnValues(selector, columnIndex, values);
+    }
+  }
+
+  @Override
+  @Nullable
+  public Object getOnlyValue(ColumnValueSelector selector)
+  {
+    if (selector.isNull()) {
+      return null;
+    }
+    return delegate.getOnlyValue(selector);
+  }
+
+  @Override
+  public void writeToKeyBuffer(int keyBufferPosition, @Nullable Object obj, ByteBuffer keyBuffer)
+  {
+    if (obj == null) {
+      keyBuffer.put(keyBufferPosition, NullHandling.IS_NULL_BYTE);
+    } else {
+      keyBuffer.put(keyBufferPosition, NullHandling.IS_NOT_NULL_BYTE);
+    }
+    delegate.writeToKeyBuffer(keyBufferPosition + Byte.BYTES, obj, keyBuffer);
+  }
+
+  @Override
+  public void initGroupingKeyColumnValue(
+      int keyBufferPosition,
+      int columnIndex,
+      Object rowObj,
+      ByteBuffer keyBuffer,
+      int[] stack
+  )
+  {
+    writeToKeyBuffer(keyBufferPosition, rowObj, keyBuffer);
+    stack[columnIndex] = 1;
+  }
+
+  @Override
+  public boolean checkRowIndexAndAddValueToGroupingKey(
+      int keyBufferPosition, Object rowObj, int rowValIdx, ByteBuffer keyBuffer
+  )
+  {
+    // rows from a nullable column always have a single value, multi-value is not currently supported
+    // this method handles row values after the first in a multivalued row, so just return false
+    return false;
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/StringGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/StringGroupByColumnSelectorStrategy.java
index 356d7d76a49..478ec9173f6 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/StringGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/StringGroupByColumnSelectorStrategy.java
@@ -20,6 +20,7 @@
 package io.druid.query.groupby.epinephelinae.column;
 
 import com.google.common.base.Preconditions;
+import io.druid.common.config.NullHandling;
 import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.DimensionSelector;
 import io.druid.segment.data.IndexedInts;
@@ -36,9 +37,14 @@ public int getGroupingKeySize()
   }
 
   @Override
-  public void processValueFromGroupingKey(GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap)
+  public void processValueFromGroupingKey(
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
+  )
   {
-    final int id = key.getInt(selectorPlus.getKeyBufferPosition());
+    final int id = key.getInt(keyBufferPosition);
 
     // GROUP_BY_MISSING_VALUE is used to indicate empty rows, which are omitted from the result map.
     if (id != GROUP_BY_MISSING_VALUE) {
@@ -47,7 +53,7 @@ public void processValueFromGroupingKey(GroupByColumnSelectorPlus selectorPlus,
           ((DimensionSelector) selectorPlus.getSelector()).lookupName(id)
       );
     } else {
-      resultMap.put(selectorPlus.getOutputName(), "");
+      resultMap.put(selectorPlus.getOutputName(), NullHandling.defaultStringValue());
     }
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java
index 32712278694..7c4efef9218 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java
@@ -68,7 +68,11 @@ public void setAggregators(Map<String, AggregatorFactory> aggregators)
   @Override
   public boolean eval(Row row)
   {
-    return HavingSpecMetricComparator.compare(row, aggregationName, value, aggregators) == 0;
+    Object metricVal = row.getRaw(aggregationName);
+    if (metricVal == null || value == null) {
+      return metricVal == null && value == null;
+    }
+    return HavingSpecMetricComparator.compare(aggregationName, value, aggregators, metricVal) == 0;
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
index 691ec2890bd..f48e3e90aed 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
@@ -68,7 +68,11 @@ public void setAggregators(Map<String, AggregatorFactory> aggregators)
   @Override
   public boolean eval(Row row)
   {
-    return HavingSpecMetricComparator.compare(row, aggregationName, value, aggregators) > 0;
+    Object metricVal = row.getRaw(aggregationName);
+    if (metricVal == null || value == null) {
+      return false;
+    }
+    return HavingSpecMetricComparator.compare(aggregationName, value, aggregators, metricVal) > 0;
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/query/groupby/having/HavingSpecMetricComparator.java b/processing/src/main/java/io/druid/query/groupby/having/HavingSpecMetricComparator.java
index 2080b5b257e..a156e4ecc50 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/HavingSpecMetricComparator.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/HavingSpecMetricComparator.java
@@ -21,7 +21,6 @@
 
 import com.google.common.primitives.Doubles;
 import com.google.common.primitives.Longs;
-import io.druid.data.input.Row;
 import io.druid.java.util.common.ISE;
 import io.druid.query.aggregation.AggregatorFactory;
 
@@ -35,11 +34,8 @@
 {
   static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+");
 
-  static int compare(Row row, String aggregationName, Number value, Map<String, AggregatorFactory> aggregators)
+  static int compare(String aggregationName, Number value, Map<String, AggregatorFactory> aggregators, Object metricValueObj)
   {
-
-    Object metricValueObj = row.getRaw(aggregationName);
-
     if (metricValueObj != null) {
       if (aggregators != null && aggregators.containsKey(aggregationName)) {
         metricValueObj = aggregators.get(aggregationName).finalizeComputation(metricValueObj);
diff --git a/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java
index cc7557b9ed6..5a9ce0c9c36 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java
@@ -66,7 +66,11 @@ public void setAggregators(Map<String, AggregatorFactory> aggregators)
   @Override
   public boolean eval(Row row)
   {
-    return HavingSpecMetricComparator.compare(row, aggregationName, value, aggregators) < 0;
+    Object metricVal = row.getRaw(aggregationName);
+    if (metricVal == null || value == null) {
+      return false;
+    }
+    return HavingSpecMetricComparator.compare(aggregationName, value, aggregators, metricVal) < 0;
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java b/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java
index fb53c65264b..ab114b88b0d 100644
--- a/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java
@@ -30,6 +30,7 @@
 import com.google.common.collect.Sets;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Row;
 import io.druid.java.util.common.ISE;
 import io.druid.java.util.common.granularity.Granularities;
@@ -269,12 +270,26 @@ public int compare(Row left, Row right)
 
   private Ordering<Row> metricOrdering(final String column, final Comparator comparator)
   {
-    return Ordering.from(Comparator.comparing((Row row) -> row.getRaw(column), Comparator.nullsLast(comparator)));
+    // As per SQL standard we need to have same ordering for metrics as dimensions i.e nulls first
+    // If SQL compatibility is not enabled we use nullsLast ordering for null metrics for backwards compatibility.
+    if (NullHandling.sqlCompatible()) {
+      return Ordering.from(Comparator.comparing((Row row) -> row.getRaw(column), Comparator.nullsFirst(comparator)));
+    } else {
+      return Ordering.from(Comparator.comparing((Row row) -> row.getRaw(column), Comparator.nullsLast(comparator)));
+    }
   }
 
   private Ordering<Row> dimensionOrdering(final String dimension, final StringComparator comparator)
   {
-    return Ordering.from(Comparator.comparing((Row row) -> row.getDimension(dimension).isEmpty() ? null : row.getDimension(dimension).get(0), Comparator.nullsFirst(comparator)));
+    return Ordering.from(
+        Comparator.comparing((Row row) -> getDimensionValue(row, dimension), Comparator.nullsFirst(comparator))
+    );
+  }
+
+  private static String getDimensionValue(Row row, String column)
+  {
+    List<String> values = row.getDimension(column);
+    return values.isEmpty() ? null : values.get(0);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java
index d50e2c269e2..d19a7e9b03c 100644
--- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java
+++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java
@@ -228,6 +228,15 @@ public boolean doMergeResults(final GroupByQuery query)
 
     // Fudge timestamp, maybe.
     final DateTime fudgeTimestamp = getUniversalTimestamp(query);
+    ImmutableMap.Builder<String, Object> context = ImmutableMap.builder();
+    context.put("finalize", false);
+    context.put(GroupByQueryConfig.CTX_KEY_STRATEGY, GroupByStrategySelector.STRATEGY_V2);
+    if (fudgeTimestamp != null) {
+      context.put(CTX_KEY_FUDGE_TIMESTAMP, String.valueOf(fudgeTimestamp.getMillis()));
+    }
+    context.put(CTX_KEY_OUTERMOST, false);
+    // the having spec shouldn't be passed down, so we need to convey the existing limit push down status
+    context.put(GroupByQueryConfig.CTX_KEY_APPLY_LIMIT_PUSH_DOWN, query.isApplyLimitPushDown());
 
     final GroupByQuery newQuery = new GroupByQuery(
         query.getDataSource(),
@@ -243,14 +252,7 @@ public boolean doMergeResults(final GroupByQuery query)
         query.getLimitSpec(),
         query.getContext()
     ).withOverriddenContext(
-        ImmutableMap.of(
-            "finalize", false,
-            GroupByQueryConfig.CTX_KEY_STRATEGY, GroupByStrategySelector.STRATEGY_V2,
-            CTX_KEY_FUDGE_TIMESTAMP, fudgeTimestamp == null ? "" : String.valueOf(fudgeTimestamp.getMillis()),
-            CTX_KEY_OUTERMOST, false,
-            // the having spec shouldn't be passed down, so we need to convey the existing limit push down status
-            GroupByQueryConfig.CTX_KEY_APPLY_LIMIT_PUSH_DOWN, query.isApplyLimitPushDown()
-        )
+        context.build()
     );
 
     Sequence<Row> rowSequence = Sequences.map(
diff --git a/processing/src/main/java/io/druid/query/lookup/LookupConfig.java b/processing/src/main/java/io/druid/query/lookup/LookupConfig.java
index 7d97835dbee..d296d6974d7 100644
--- a/processing/src/main/java/io/druid/query/lookup/LookupConfig.java
+++ b/processing/src/main/java/io/druid/query/lookup/LookupConfig.java
@@ -21,7 +21,7 @@
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Strings;
+import io.druid.java.util.common.StringUtils;
 
 import javax.validation.constraints.Min;
 import java.util.Objects;
@@ -56,7 +56,7 @@ public LookupConfig(
       @JsonProperty("snapshotWorkingDir") String snapshotWorkingDir
   )
   {
-    this.snapshotWorkingDir = Strings.nullToEmpty(snapshotWorkingDir);
+    this.snapshotWorkingDir = StringUtils.nullToEmptyNonDruidDataString(snapshotWorkingDir);
   }
 
   public String getSnapshotWorkingDir()
diff --git a/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java b/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java
index 702f1802063..e41710b42e0 100644
--- a/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java
@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Function;
-import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.extraction.ExtractionCacheHelper;
@@ -52,9 +51,9 @@ public LookupExtractionFn(
         {
           @Nullable
           @Override
-          public String apply(String input)
+          public String apply(@Nullable String input)
           {
-            return lookup.apply(Strings.nullToEmpty(input));
+            return lookup.apply(input);
           }
         },
         retainMissingValue,
diff --git a/processing/src/main/java/io/druid/query/lookup/LookupExtractor.java b/processing/src/main/java/io/druid/query/lookup/LookupExtractor.java
index e5e9d6f0385..897001c22e6 100644
--- a/processing/src/main/java/io/druid/query/lookup/LookupExtractor.java
+++ b/processing/src/main/java/io/druid/query/lookup/LookupExtractor.java
@@ -25,7 +25,6 @@
 import io.druid.query.extraction.MapLookupExtractor;
 
 import javax.annotation.Nullable;
-import javax.validation.constraints.NotNull;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -40,12 +39,12 @@
   /**
    * Apply a particular lookup methodology to the input string
    *
-   * @param key The value to apply the lookup to. May not be null
+   * @param key The value to apply the lookup to.
    *
-   * @return The lookup, or null key cannot have the lookup applied to it and should be treated as missing.
+   * @return The lookup, or null when key is `null` or cannot have the lookup applied to it and should be treated as missing.
    */
   @Nullable
-  public abstract String apply(@NotNull String key);
+  public abstract String apply(@Nullable String key);
 
   /**
    * @param keys set of keys to apply lookup for each element
@@ -71,15 +70,15 @@
    * Provide the reverse mapping from a given value to a list of keys
    *
    * @param value the value to apply the reverse lookup
-   *              Null and empty are considered to be the same value = nullToEmpty(value)
    *
    * @return the list of keys that maps to value or empty list.
    * Note that for the case of a none existing value in the lookup we have to cases either return an empty list OR list with null element.
    * returning an empty list implies that user want to ignore such a lookup value.
    * In the other hand returning a list with the null element implies user want to map the none existing value to the key null.
+   * Null value maps to empty list.
    */
 
-  public abstract List<String> unapply(String value);
+  public abstract List<String> unapply(@Nullable String value);
 
   /**
    * @param values Iterable of values for which will perform reverse lookup
diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java
index 2da31bd7bb2..f0e1709b3db 100644
--- a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java
+++ b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java
@@ -21,10 +21,10 @@
 
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.guava.Accumulator;
@@ -216,8 +216,8 @@ private ColumnAnalysis analyzeStringColumn(
     }
 
     if (analyzingMinMax() && cardinality > 0) {
-      min = Strings.nullToEmpty(bitmapIndex.getValue(0));
-      max = Strings.nullToEmpty(bitmapIndex.getValue(cardinality - 1));
+      min = NullHandling.nullToEmptyIfNeeded(bitmapIndex.getValue(0));
+      max = NullHandling.nullToEmptyIfNeeded(bitmapIndex.getValue(cardinality - 1));
     }
 
     return new ColumnAnalysis(
diff --git a/processing/src/main/java/io/druid/query/search/SearchHit.java b/processing/src/main/java/io/druid/query/search/SearchHit.java
index de4d38e1a3b..deb192e986c 100644
--- a/processing/src/main/java/io/druid/query/search/SearchHit.java
+++ b/processing/src/main/java/io/druid/query/search/SearchHit.java
@@ -22,6 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
+import io.druid.common.config.NullHandling;
 
 /**
  */
@@ -39,7 +40,7 @@ public SearchHit(
   )
   {
     this.dimension = Preconditions.checkNotNull(dimension);
-    this.value = Preconditions.checkNotNull(value);
+    this.value = NullHandling.nullToEmptyIfNeeded(value);
     this.count = count;
   }
 
diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
index 9a9540eb307..de51b2cffc3 100644
--- a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
@@ -20,7 +20,6 @@
 package io.druid.query.search;
 
 import com.google.common.base.Function;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
@@ -132,7 +131,7 @@ public void updateSearchResultSet(
         for (int i = 0, rowSize = row.size(); i < rowSize; ++i) {
           final String dimVal = selector.lookupName(row.get(i));
           if (searchQuerySpec.accept(dimVal)) {
-            set.addTo(new SearchHit(outputName, Strings.nullToEmpty(dimVal)), 1);
+            set.addTo(new SearchHit(outputName, dimVal), 1);
             if (set.size() >= limit) {
               return;
             }
diff --git a/processing/src/main/java/io/druid/query/search/UseIndexesStrategy.java b/processing/src/main/java/io/druid/query/search/UseIndexesStrategy.java
index 9694e9dc7b6..2276bf868ae 100644
--- a/processing/src/main/java/io/druid/query/search/UseIndexesStrategy.java
+++ b/processing/src/main/java/io/druid/query/search/UseIndexesStrategy.java
@@ -20,7 +20,6 @@
 package io.druid.query.search;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import io.druid.collections.bitmap.BitmapFactory;
@@ -261,7 +260,7 @@ public IndexOnlyExecutor(
           extractionFn = IdentityExtractionFn.getInstance();
         }
         for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
-          String dimVal = Strings.nullToEmpty(extractionFn.apply(bitmapIndex.getValue(i)));
+          String dimVal = extractionFn.apply(bitmapIndex.getValue(i));
           if (!searchQuerySpec.accept(dimVal)) {
             continue;
           }
diff --git a/processing/src/main/java/io/druid/query/topn/PooledTopNAlgorithm.java b/processing/src/main/java/io/druid/query/topn/PooledTopNAlgorithm.java
index f6f2f683b8d..eb92dfbd891 100644
--- a/processing/src/main/java/io/druid/query/topn/PooledTopNAlgorithm.java
+++ b/processing/src/main/java/io/druid/query/topn/PooledTopNAlgorithm.java
@@ -252,7 +252,7 @@ public PooledTopNParams makeInitParams(
     int numBytesPerRecord = 0;
 
     for (int i = 0; i < query.getAggregatorSpecs().size(); ++i) {
-      aggregatorSizes[i] = query.getAggregatorSpecs().get(i).getMaxIntermediateSize();
+      aggregatorSizes[i] = query.getAggregatorSpecs().get(i).getMaxIntermediateSizeWithNulls();
       numBytesPerRecord += aggregatorSizes[i];
     }
 
diff --git a/processing/src/main/java/io/druid/query/topn/TopNMapFn.java b/processing/src/main/java/io/druid/query/topn/TopNMapFn.java
index 7e64ea9a653..4725ff8d4b9 100644
--- a/processing/src/main/java/io/druid/query/topn/TopNMapFn.java
+++ b/processing/src/main/java/io/druid/query/topn/TopNMapFn.java
@@ -49,21 +49,13 @@
     }
   }
 
-  private static Function<Object, Object> STRING_TRANSFORMER = input -> Objects.toString(input, null);
-
-  private static Function<Object, Object> LONG_TRANSFORMER = input -> {
-    final Long longVal = DimensionHandlerUtils.convertObjectToLong(input);
-    return longVal == null ? DimensionHandlerUtils.ZERO_LONG : longVal;
-  };
-
-  private static Function<Object, Object> FLOAT_TRANSFORMER = input -> {
-    final Float floatVal = DimensionHandlerUtils.convertObjectToFloat(input);
-    return floatVal == null ? DimensionHandlerUtils.ZERO_FLOAT : floatVal;
-  };
-  private static Function<Object, Object> DOUBLE_TRANSFORMER = input -> {
-    final Double doubleValue = DimensionHandlerUtils.convertObjectToDouble(input);
-    return doubleValue == null ? DimensionHandlerUtils.ZERO_DOUBLE : doubleValue;
-  };
+  private static Function<Object, Object> STRING_TRANSFORMER = Objects::toString;
+
+  private static Function<Object, Object> LONG_TRANSFORMER = DimensionHandlerUtils::convertObjectToLong;
+
+  private static Function<Object, Object> FLOAT_TRANSFORMER = DimensionHandlerUtils::convertObjectToFloat;
+
+  private static Function<Object, Object> DOUBLE_TRANSFORMER = DimensionHandlerUtils::convertObjectToDouble;
 
   private static final TopNColumnSelectorStrategyFactory STRATEGY_FACTORY = new TopNColumnSelectorStrategyFactory();
 
diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
index ffdb07e25b5..82057973149 100644
--- a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
+++ b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
@@ -116,7 +116,7 @@ private TopNMapFn getMapFn(
 
     int numBytesPerRecord = 0;
     for (AggregatorFactory aggregatorFactory : query.getAggregatorSpecs()) {
-      numBytesPerRecord += aggregatorFactory.getMaxIntermediateSize();
+      numBytesPerRecord += aggregatorFactory.getMaxIntermediateSizeWithNulls();
     }
 
     final TopNAlgorithmSelector selector = new TopNAlgorithmSelector(cardinality, numBytesPerRecord);
diff --git a/processing/src/main/java/io/druid/segment/BaseObjectColumnValueSelector.java b/processing/src/main/java/io/druid/segment/BaseObjectColumnValueSelector.java
index da3b5de0d07..7db0ae13a3f 100644
--- a/processing/src/main/java/io/druid/segment/BaseObjectColumnValueSelector.java
+++ b/processing/src/main/java/io/druid/segment/BaseObjectColumnValueSelector.java
@@ -31,7 +31,7 @@
  * All implementations of this interface MUST also implement {@link ColumnValueSelector}.
  */
 @PublicApi
-public interface BaseObjectColumnValueSelector<T>
+public interface BaseObjectColumnValueSelector<T> extends BaseNullableColumnValueSelector
 {
   @Nullable
   T getObject();
diff --git a/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java b/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java
index 90ea9a1ac46..ac7b9586cd4 100644
--- a/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java
+++ b/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java
@@ -225,7 +225,7 @@ public ImmutableBitmap getBitmapIndex(String dimension, String value)
     }
 
     final BitmapIndex bitmapIndex = column.getBitmapIndex();
-    return bitmapIndex.getBitmap(bitmapIndex.getIndex(NullHandling.emptyToNullIfNeeded(value)));
+    return bitmapIndex.getBitmap(bitmapIndex.getIndex(value));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java b/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java
index bcc3ac5976e..a823e25675f 100644
--- a/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java
+++ b/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java
@@ -237,6 +237,15 @@ private static ColumnCapabilities getEffectiveCapabilities(
     return strategyFactory.makeColumnSelectorStrategy(capabilities, selector);
   }
 
+  @Nullable
+  public static String convertObjectToString(@Nullable Object valObj)
+  {
+    if (valObj == null) {
+      return null;
+    }
+    return valObj.toString();
+  }
+
   @Nullable
   public static Long convertObjectToLong(@Nullable Object valObj)
   {
diff --git a/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java b/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java
index 6745a112087..4c7b04dfe8d 100644
--- a/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java
+++ b/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java
@@ -44,9 +44,7 @@ public Double processRowValsToUnsortedEncodedKeyComponent(Object dimValues, bool
     if (dimValues instanceof List) {
       throw new UnsupportedOperationException("Numeric columns do not support multivalue rows.");
     }
-    Double ret = DimensionHandlerUtils.convertObjectToDouble(dimValues, reportParseExceptions);
-    // remove null -> zero conversion when https://github.com/druid-io/druid/pull/5278 series of patches is merged
-    return ret == null ? DimensionHandlerUtils.ZERO_DOUBLE : ret;
+    return DimensionHandlerUtils.convertObjectToDouble(dimValues, reportParseExceptions);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java b/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java
index 8783b7b1574..f48fa10d3bb 100644
--- a/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java
+++ b/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java
@@ -45,9 +45,7 @@ public Float processRowValsToUnsortedEncodedKeyComponent(Object dimValues, boole
       throw new UnsupportedOperationException("Numeric columns do not support multivalue rows.");
     }
 
-    Float ret = DimensionHandlerUtils.convertObjectToFloat(dimValues, reportParseExceptions);
-    // remove null -> zero conversion when https://github.com/druid-io/druid/pull/5278 series of patches is merged
-    return ret == null ? DimensionHandlerUtils.ZERO_FLOAT : ret;
+    return DimensionHandlerUtils.convertObjectToFloat(dimValues, reportParseExceptions);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/segment/LongDimensionIndexer.java b/processing/src/main/java/io/druid/segment/LongDimensionIndexer.java
index 208567101b6..76c433c3eaa 100644
--- a/processing/src/main/java/io/druid/segment/LongDimensionIndexer.java
+++ b/processing/src/main/java/io/druid/segment/LongDimensionIndexer.java
@@ -45,9 +45,7 @@ public Long processRowValsToUnsortedEncodedKeyComponent(Object dimValues, boolea
       throw new UnsupportedOperationException("Numeric columns do not support multivalue rows.");
     }
 
-    Long ret = DimensionHandlerUtils.convertObjectToLong(dimValues, reportParseExceptions);
-    // remove null -> zero conversion when https://github.com/druid-io/druid/pull/5278 series of patches is merged
-    return ret == null ? DimensionHandlerUtils.ZERO_LONG : ret;
+    return DimensionHandlerUtils.convertObjectToLong(dimValues, reportParseExceptions);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/segment/RowCombiningTimeAndDimsIterator.java b/processing/src/main/java/io/druid/segment/RowCombiningTimeAndDimsIterator.java
index 05a0ce9ceb2..b73ca16a95c 100644
--- a/processing/src/main/java/io/druid/segment/RowCombiningTimeAndDimsIterator.java
+++ b/processing/src/main/java/io/druid/segment/RowCombiningTimeAndDimsIterator.java
@@ -117,7 +117,7 @@
     );
 
     combinedMetricSelectors = new AggregateCombiner[metricAggs.length];
-    Arrays.setAll(combinedMetricSelectors, metricIndex -> metricAggs[metricIndex].makeAggregateCombiner());
+    Arrays.setAll(combinedMetricSelectors, metricIndex -> metricAggs[metricIndex].makeNullableAggregateCombiner());
     combinedMetricNames = metricNames;
 
     combinedTimeAndDimsPointersByOriginalIteratorIndex = new TimeAndDimsPointer[numCombinedIterators];
diff --git a/processing/src/main/java/io/druid/segment/filter/BoundFilter.java b/processing/src/main/java/io/druid/segment/filter/BoundFilter.java
index 3ef52e5c035..eeb484e9edf 100644
--- a/processing/src/main/java/io/druid/segment/filter/BoundFilter.java
+++ b/processing/src/main/java/io/druid/segment/filter/BoundFilter.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Predicate;
 import com.google.common.base.Supplier;
 import io.druid.collections.bitmap.ImmutableBitmap;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.Pair;
 import io.druid.query.BitmapResultFactory;
 import io.druid.query.extraction.ExtractionFn;
@@ -148,7 +149,7 @@ public boolean supportsSelectivityEstimation(
     if (!boundDimFilter.hasLowerBound()) {
       startIndex = 0;
     } else {
-      final int found = bitmapIndex.getIndex(boundDimFilter.getLower());
+      final int found = bitmapIndex.getIndex(NullHandling.emptyToNullIfNeeded(boundDimFilter.getLower()));
       if (found >= 0) {
         startIndex = boundDimFilter.isLowerStrict() ? found + 1 : found;
       } else {
@@ -159,7 +160,7 @@ public boolean supportsSelectivityEstimation(
     if (!boundDimFilter.hasUpperBound()) {
       endIndex = bitmapIndex.getCardinality();
     } else {
-      final int found = bitmapIndex.getIndex(boundDimFilter.getUpper());
+      final int found = bitmapIndex.getIndex(NullHandling.emptyToNullIfNeeded(boundDimFilter.getUpper()));
       if (found >= 0) {
         endIndex = boundDimFilter.isUpperStrict() ? found : found + 1;
       } else {
@@ -249,9 +250,10 @@ private boolean doesMatch(String input)
   {
     if (input == null) {
       return (!boundDimFilter.hasLowerBound()
-              || (boundDimFilter.getLower().isEmpty() && !boundDimFilter.isLowerStrict())) // lower bound allows null
+              || (NullHandling.isNullOrEquivalent(boundDimFilter.getLower()) && !boundDimFilter.isLowerStrict()))
+             // lower bound allows null
              && (!boundDimFilter.hasUpperBound()
-                 || !boundDimFilter.getUpper().isEmpty()
+                 || !NullHandling.isNullOrEquivalent(boundDimFilter.getUpper())
                  || !boundDimFilter.isUpperStrict()); // upper bound allows null
     }
     int lowerComparing = 1;
diff --git a/processing/src/main/java/io/druid/segment/filter/ExpressionFilter.java b/processing/src/main/java/io/druid/segment/filter/ExpressionFilter.java
index 1fb815dd001..8f52d9b86b1 100644
--- a/processing/src/main/java/io/druid/segment/filter/ExpressionFilter.java
+++ b/processing/src/main/java/io/druid/segment/filter/ExpressionFilter.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
+import io.druid.common.config.NullHandling;
 import io.druid.math.expr.Evals;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.ExprEval;
@@ -58,6 +59,9 @@ public ValueMatcher makeMatcher(final ColumnSelectorFactory factory)
       @Override
       public boolean matches()
       {
+        if (NullHandling.sqlCompatible() && selector.isNull()) {
+          return false;
+        }
         return Evals.asBoolean(selector.getLong());
       }
 
@@ -108,7 +112,8 @@ public boolean supportsBitmapIndex(final BitmapIndexSelector selector)
           value -> expr.eval(identifierName -> {
             // There's only one binding, and it must be the single column, so it can safely be ignored in production.
             assert column.equals(identifierName);
-            return value;
+            // convert null to Empty before passing to expressions if needed.
+            return NullHandling.nullToEmptyIfNeeded(value);
           }).asBoolean()
       );
     }
diff --git a/processing/src/main/java/io/druid/segment/filter/Filters.java b/processing/src/main/java/io/druid/segment/filter/Filters.java
index 48be26062fd..66557dbb322 100644
--- a/processing/src/main/java/io/druid/segment/filter/Filters.java
+++ b/processing/src/main/java/io/druid/segment/filter/Filters.java
@@ -25,7 +25,6 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import io.druid.collections.bitmap.ImmutableBitmap;
-import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.guava.FunctionalIterable;
 import io.druid.query.BitmapResultFactory;
 import io.druid.query.ColumnSelectorPlus;
@@ -135,8 +134,7 @@ public static ValueMatcher makeValueMatcher(
             columnSelectorFactory
         );
 
-    return selector.getColumnSelectorStrategy()
-                   .makeValueMatcher(selector.getSelector(), NullHandling.emptyToNullIfNeeded(value));
+    return selector.getColumnSelectorStrategy().makeValueMatcher(selector.getSelector(), value);
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/segment/filter/InFilter.java b/processing/src/main/java/io/druid/segment/filter/InFilter.java
index 63a75c2b429..00d13e617f9 100644
--- a/processing/src/main/java/io/druid/segment/filter/InFilter.java
+++ b/processing/src/main/java/io/druid/segment/filter/InFilter.java
@@ -20,7 +20,6 @@
 package io.druid.segment.filter;
 
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import com.google.common.base.Supplier;
 import io.druid.collections.bitmap.ImmutableBitmap;
 import io.druid.query.BitmapResultFactory;
@@ -165,9 +164,9 @@ private DruidPredicateFactory getPredicateFactory()
       public Predicate<String> makeStringPredicate()
       {
         if (extractionFn != null) {
-          return input -> values.contains(Strings.nullToEmpty(extractionFn.apply(input)));
+          return input -> values.contains(extractionFn.apply(input));
         } else {
-          return input -> values.contains(Strings.nullToEmpty(input));
+          return input -> values.contains(input);
         }
       }
 
diff --git a/processing/src/main/java/io/druid/segment/filter/LikeFilter.java b/processing/src/main/java/io/druid/segment/filter/LikeFilter.java
index 6e03fce286c..3de050e2d5b 100644
--- a/processing/src/main/java/io/druid/segment/filter/LikeFilter.java
+++ b/processing/src/main/java/io/druid/segment/filter/LikeFilter.java
@@ -19,9 +19,9 @@
 
 package io.druid.segment.filter;
 
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import io.druid.collections.bitmap.ImmutableBitmap;
+import io.druid.common.config.NullHandling;
 import io.druid.query.BitmapResultFactory;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.filter.BitmapIndexSelector;
@@ -90,7 +90,12 @@ public boolean supportsSelectivityEstimation(
   {
     if (isSimpleEquals()) {
       // Verify that dimension equals prefix.
-      return ImmutableList.of(selector.getBitmapIndex(dimension, likeMatcher.getPrefix()));
+      return ImmutableList.of(
+          selector.getBitmapIndex(
+              dimension,
+              NullHandling.emptyToNullIfNeeded(likeMatcher.getPrefix())
+          )
+      );
     } else if (isSimplePrefix()) {
       // Verify that dimension startsWith prefix, and is accepted by likeMatcher.matchesSuffixOnly.
       final BitmapIndex bitmapIndex = selector.getBitmapIndex(dimension);
@@ -140,16 +145,24 @@ private IntIterable getDimValueIndexIterableForPrefixMatch(
       final Indexed<String> dimValues
   )
   {
-    final String lower = Strings.nullToEmpty(likeMatcher.getPrefix());
-    final String upper = Strings.nullToEmpty(likeMatcher.getPrefix()) + Character.MAX_VALUE;
+
+    final String lower = NullHandling.nullToEmptyIfNeeded(likeMatcher.getPrefix());
+    final String upper = NullHandling.nullToEmptyIfNeeded(likeMatcher.getPrefix()) + Character.MAX_VALUE;
+
     final int startIndex; // inclusive
     final int endIndex; // exclusive
 
-    final int lowerFound = bitmapIndex.getIndex(lower);
-    startIndex = lowerFound >= 0 ? lowerFound : -(lowerFound + 1);
+    if (lower == null) {
+      // For Null values
+      startIndex = bitmapIndex.getIndex(null);
+      endIndex = startIndex + 1;
+    } else {
+      final int lowerFound = bitmapIndex.getIndex(lower);
+      startIndex = lowerFound >= 0 ? lowerFound : -(lowerFound + 1);
 
-    final int upperFound = bitmapIndex.getIndex(upper);
-    endIndex = upperFound >= 0 ? upperFound + 1 : -(upperFound + 1);
+      final int upperFound = bitmapIndex.getIndex(upper);
+      endIndex = upperFound >= 0 ? upperFound + 1 : -(upperFound + 1);
+    }
 
     return new IntIterable()
     {
diff --git a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java
index 5ee0cf4722a..472adfdc673 100644
--- a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java
+++ b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java
@@ -129,11 +129,11 @@ public FactsHolder getFacts()
       if (i == 0) {
         aggOffsetInBuffer[i] = 0;
       } else {
-        aggOffsetInBuffer[i] = aggOffsetInBuffer[i - 1] + metrics[i - 1].getMaxIntermediateSize();
+        aggOffsetInBuffer[i] = aggOffsetInBuffer[i - 1] + metrics[i - 1].getMaxIntermediateSizeWithNulls();
       }
     }
 
-    aggsTotalSize = aggOffsetInBuffer[metrics.length - 1] + metrics[metrics.length - 1].getMaxIntermediateSize();
+    aggsTotalSize = aggOffsetInBuffer[metrics.length - 1] + metrics[metrics.length - 1].getMaxIntermediateSizeWithNulls();
 
     return new BufferAggregator[metrics.length];
   }
diff --git a/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java
index 566d51ed1e2..0b86dd05ce7 100644
--- a/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java
+++ b/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java
@@ -106,7 +106,7 @@ private static long getMaxBytesPerRowForAggregators(IncrementalIndexSchema incre
   {
     long maxAggregatorIntermediateSize = Integer.BYTES * incrementalIndexSchema.getMetrics().length;
     maxAggregatorIntermediateSize += Arrays.stream(incrementalIndexSchema.getMetrics())
-                                           .mapToLong(aggregator -> aggregator.getMaxIntermediateSize() + Long.BYTES * 2)
+                                           .mapToLong(aggregator -> aggregator.getMaxIntermediateSizeWithNulls() + Long.BYTES * 2)
                                            .sum();
     return maxAggregatorIntermediateSize;
   }
diff --git a/processing/src/main/java/io/druid/segment/serde/BitmapIndexColumnPartSupplier.java b/processing/src/main/java/io/druid/segment/serde/BitmapIndexColumnPartSupplier.java
index da121e8ae73..d00ce22e3d6 100644
--- a/processing/src/main/java/io/druid/segment/serde/BitmapIndexColumnPartSupplier.java
+++ b/processing/src/main/java/io/druid/segment/serde/BitmapIndexColumnPartSupplier.java
@@ -22,7 +22,6 @@
 import com.google.common.base.Supplier;
 import io.druid.collections.bitmap.BitmapFactory;
 import io.druid.collections.bitmap.ImmutableBitmap;
-import io.druid.common.config.NullHandling;
 import io.druid.segment.column.BitmapIndex;
 import io.druid.segment.data.GenericIndexed;
 
@@ -78,7 +77,7 @@ public BitmapFactory getBitmapFactory()
       public int getIndex(String value)
       {
         // GenericIndexed.indexOf satisfies contract needed by BitmapIndex.indexOf
-        return dictionary.indexOf(NullHandling.emptyToNullIfNeeded(value));
+        return dictionary.indexOf(value);
       }
 
       @Override
diff --git a/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java b/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java
index 8a46d192259..8d833aaeed4 100644
--- a/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java
+++ b/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java
@@ -21,13 +21,11 @@
 
 import com.google.common.base.Function;
 import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.segment.writeout.SegmentWriteOutMedium;
 import io.druid.segment.GenericColumnSerializer;
 import io.druid.segment.column.ColumnBuilder;
 import io.druid.segment.data.ObjectStrategy;
-import it.unimi.dsi.fastutil.bytes.ByteArrays;
+import io.druid.segment.writeout.SegmentWriteOutMedium;
 
-import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 
 /**
@@ -82,9 +80,9 @@
    *
    * @return serialized intermediate representation of aggregate in byte[]
    */
-  public byte[] toBytes(@Nullable Object val)
+  public byte[] toBytes(Object val)
   {
-    return val != null ? getObjectStrategy().toBytes(val) : ByteArrays.EMPTY_ARRAY;
+    return getObjectStrategy().toBytes(val);
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/segment/virtual/ExpressionColumnValueSelector.java b/processing/src/main/java/io/druid/segment/virtual/ExpressionColumnValueSelector.java
index c2900680909..4af0d1eb63d 100644
--- a/processing/src/main/java/io/druid/segment/virtual/ExpressionColumnValueSelector.java
+++ b/processing/src/main/java/io/druid/segment/virtual/ExpressionColumnValueSelector.java
@@ -82,6 +82,9 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   @Override
   public boolean isNull()
   {
-    return getObject().isNull();
+    // It is possible for an expression to have a non-null String value but it can return null when parsed
+    // as a primitive long/float/double.
+    // ExprEval.isNumericNull checks whether the parsed primitive value is null or not.
+    return getObject().isNumericNull();
   }
 }
diff --git a/processing/src/main/java/io/druid/segment/virtual/ExpressionSelectors.java b/processing/src/main/java/io/druid/segment/virtual/ExpressionSelectors.java
index e191d6a040a..b55507d7e8c 100644
--- a/processing/src/main/java/io/druid/segment/virtual/ExpressionSelectors.java
+++ b/processing/src/main/java/io/druid/segment/virtual/ExpressionSelectors.java
@@ -161,7 +161,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
     if (bindings.equals(ExprUtils.nilBindings())) {
       // Optimization for constant expressions.
       final ExprEval eval = expression.eval(bindings);
-      if (NullHandling.sqlCompatible() && eval.isNull()) {
+      if (NullHandling.sqlCompatible() && eval.isNumericNull()) {
         return NilColumnValueSelector.instance();
       }
       return new ConstantColumnValueSelector<>(
@@ -248,19 +248,27 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   {
     final Map<String, Supplier<Object>> suppliers = Maps.newHashMap();
     for (String columnName : Parser.findRequiredBindings(expression)) {
-      final ColumnCapabilities columnCapabilities = columnSelectorFactory.getColumnCapabilities(columnName);
+      final ColumnCapabilities columnCapabilities = columnSelectorFactory
+              .getColumnCapabilities(columnName);
       final ValueType nativeType = columnCapabilities != null ? columnCapabilities.getType() : null;
       final Supplier<Object> supplier;
 
       if (nativeType == ValueType.FLOAT) {
-        supplier = columnSelectorFactory.makeColumnValueSelector(columnName)::getFloat;
+        ColumnValueSelector selector = columnSelectorFactory
+                .makeColumnValueSelector(columnName);
+        supplier = makeNullableSupplier(selector, selector::getFloat);
       } else if (nativeType == ValueType.LONG) {
-        supplier = columnSelectorFactory.makeColumnValueSelector(columnName)::getLong;
+        ColumnValueSelector selector = columnSelectorFactory
+                .makeColumnValueSelector(columnName);
+        supplier = makeNullableSupplier(selector, selector::getLong);
       } else if (nativeType == ValueType.DOUBLE) {
-        supplier = columnSelectorFactory.makeColumnValueSelector(columnName)::getDouble;
+        ColumnValueSelector selector = columnSelectorFactory
+                .makeColumnValueSelector(columnName);
+        supplier = makeNullableSupplier(selector, selector::getDouble);
       } else if (nativeType == ValueType.STRING) {
         supplier = supplierFromDimensionSelector(
-            columnSelectorFactory.makeDimensionSelector(new DefaultDimensionSpec(columnName, columnName))
+                columnSelectorFactory
+                        .makeDimensionSelector(new DefaultDimensionSpec(columnName, columnName))
         );
       } else if (nativeType == null) {
         // Unknown ValueType. Try making an Object selector and see if that gives us anything useful.
@@ -292,6 +300,23 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
     }
   }
 
+  private static <T> Supplier<T> makeNullableSupplier(
+      ColumnValueSelector selector,
+      Supplier<T> supplier
+  )
+  {
+    if (NullHandling.replaceWithDefault()) {
+      return supplier;
+    } else {
+      return () -> {
+        if (selector.isNull()) {
+          return null;
+        }
+        return supplier.get();
+      };
+    }
+  }
+
   @VisibleForTesting
   @Nonnull
   static Supplier<Object> supplierFromDimensionSelector(final DimensionSelector selector)
diff --git a/processing/src/main/java/io/druid/segment/virtual/SingleLongInputCachingExpressionColumnValueSelector.java b/processing/src/main/java/io/druid/segment/virtual/SingleLongInputCachingExpressionColumnValueSelector.java
index f5ce2e9eb08..d7af85fcf56 100644
--- a/processing/src/main/java/io/druid/segment/virtual/SingleLongInputCachingExpressionColumnValueSelector.java
+++ b/processing/src/main/java/io/druid/segment/virtual/SingleLongInputCachingExpressionColumnValueSelector.java
@@ -150,6 +150,9 @@ private ExprEval eval(final long value)
   @Override
   public boolean isNull()
   {
-    return getObject().isNull();
+    // It is possible for an expression to have a non-null String value but it can return null when parsed
+    // as a primitive long/float/double.
+    // ExprEval.isNumericNull checks whether the parsed primitive value is null or not.
+    return getObject().isNumericNull();
   }
 }
diff --git a/processing/src/main/java/io/druid/segment/virtual/SingleStringInputCachingExpressionColumnValueSelector.java b/processing/src/main/java/io/druid/segment/virtual/SingleStringInputCachingExpressionColumnValueSelector.java
index 2e30acabdb8..d18a8de4ce3 100644
--- a/processing/src/main/java/io/druid/segment/virtual/SingleStringInputCachingExpressionColumnValueSelector.java
+++ b/processing/src/main/java/io/druid/segment/virtual/SingleStringInputCachingExpressionColumnValueSelector.java
@@ -139,7 +139,10 @@ private ExprEval eval()
   @Override
   public boolean isNull()
   {
-    return eval().isNull();
+    // It is possible for an expression to have a non-null String value but it can return null when parsed
+    // as a primitive long/float/double.
+    // ExprEval.isNumericNull checks whether the parsed primitive value is null or not.
+    return eval().isNumericNull();
   }
 
   public static class LruEvalCache
diff --git a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java
index 6fcfc2ca409..a0bafa059b0 100644
--- a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java
+++ b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java
@@ -25,6 +25,7 @@
 import com.google.common.collect.Maps;
 import com.google.common.io.Closeables;
 import com.google.common.util.concurrent.MoreExecutors;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.MapInputRowParser;
@@ -47,6 +48,7 @@
 import io.druid.segment.IndexBuilder;
 import io.druid.segment.QueryableIndex;
 import io.druid.segment.QueryableIndexSegment;
+import io.druid.segment.TestHelper;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import org.junit.After;
 import org.junit.Assert;
@@ -283,8 +285,13 @@ public void testNumericEvolutionTimeseriesAggregation()
     );
 
     // Only nonexistent(4)
+    Map<String, Object> result = Maps.newHashMap();
+    result.put("a", NullHandling.defaultLongValue());
+    result.put("b", NullHandling.defaultDoubleValue());
+    result.put("c", NullHandling.defaultLongValue());
+    result.put("d", NullHandling.defaultDoubleValue());
     Assert.assertEquals(
-        timeseriesResult(ImmutableMap.of("a", 0L, "b", 0.0, "c", 0L, "d", 0.0)),
+        timeseriesResult(result),
         runQuery(query, factory, ImmutableList.of(index4))
     );
 
@@ -353,7 +360,14 @@ public void testNumericEvolutionFiltering()
 
     // Only nonexistent(4)
     Assert.assertEquals(
-        timeseriesResult(ImmutableMap.of("a", 0L, "b", 0.0, "c", 0L)),
+        timeseriesResult(TestHelper.createExpectedMap(
+            "a",
+            NullHandling.defaultLongValue(),
+            "b",
+            NullHandling.defaultDoubleValue(),
+            "c",
+            0L
+        )),
         runQuery(query, factory, ImmutableList.of(index4))
     );
 
diff --git a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java
index 52cfcc7330b..d738008eccc 100644
--- a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java
+++ b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java
@@ -636,7 +636,7 @@ public ObjectMapper getObjectMapper()
     agg.aggregate(myBuf, 0);
     results[0] = (T) agg.get(myBuf, 0);
 
-    byte[] theBytes = new byte[factory.getMaxIntermediateSize()];
+    byte[] theBytes = new byte[factory.getMaxIntermediateSizeWithNulls()];
     myBuf.get(theBytes);
 
     ByteBuffer newBuf = ByteBuffer.allocate(941209);
diff --git a/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java
index a0a85011961..dd8fd1a69ac 100644
--- a/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java
@@ -56,7 +56,7 @@ public void setup()
   @Test
   public void testDoubleMaxAggregator()
   {
-    DoubleMaxAggregator agg = (DoubleMaxAggregator) doubleMaxAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = doubleMaxAggFactory.factorize(colSelectorFactory);
 
     aggregate(selector, agg);
     aggregate(selector, agg);
@@ -71,9 +71,9 @@ public void testDoubleMaxAggregator()
   @Test
   public void testDoubleMaxBufferAggregator()
   {
-    DoubleMaxBufferAggregator agg = (DoubleMaxBufferAggregator) doubleMaxAggFactory.factorizeBuffered(colSelectorFactory);
+    BufferAggregator agg = doubleMaxAggFactory.factorizeBuffered(colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Double.BYTES]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[Double.BYTES + Byte.BYTES]);
     agg.init(buffer, 0);
 
     aggregate(selector, agg, buffer, 0);
@@ -105,13 +105,13 @@ public void testEqualsAndHashCode()
     Assert.assertFalse(one.equals(two));
   }
 
-  private void aggregate(TestDoubleColumnSelectorImpl selector, DoubleMaxAggregator agg)
+  private void aggregate(TestDoubleColumnSelectorImpl selector, Aggregator agg)
   {
     agg.aggregate();
     selector.increment();
   }
 
-  private void aggregate(TestDoubleColumnSelectorImpl selector, DoubleMaxBufferAggregator agg, ByteBuffer buff, int position)
+  private void aggregate(TestDoubleColumnSelectorImpl selector, BufferAggregator agg, ByteBuffer buff, int position)
   {
     agg.aggregate(buff, position);
     selector.increment();
diff --git a/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java
index 067cb38f599..79446bce53d 100644
--- a/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java
@@ -56,7 +56,7 @@ public void setup()
   @Test
   public void testDoubleMinAggregator()
   {
-    DoubleMinAggregator agg = (DoubleMinAggregator) doubleMinAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = doubleMinAggFactory.factorize(colSelectorFactory);
 
     aggregate(selector, agg);
     aggregate(selector, agg);
@@ -71,9 +71,9 @@ public void testDoubleMinAggregator()
   @Test
   public void testDoubleMinBufferAggregator()
   {
-    DoubleMinBufferAggregator agg = (DoubleMinBufferAggregator) doubleMinAggFactory.factorizeBuffered(colSelectorFactory);
+    BufferAggregator agg = doubleMinAggFactory.factorizeBuffered(colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Double.BYTES]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[Double.BYTES + Byte.BYTES]);
     agg.init(buffer, 0);
 
     aggregate(selector, agg, buffer, 0);
@@ -105,13 +105,13 @@ public void testEqualsAndHashCode()
     Assert.assertFalse(one.equals(two));
   }
 
-  private void aggregate(TestDoubleColumnSelectorImpl selector, DoubleMinAggregator agg)
+  private void aggregate(TestDoubleColumnSelectorImpl selector, Aggregator agg)
   {
     agg.aggregate();
     selector.increment();
   }
 
-  private void aggregate(TestDoubleColumnSelectorImpl selector, DoubleMinBufferAggregator agg, ByteBuffer buff, int position)
+  private void aggregate(TestDoubleColumnSelectorImpl selector, BufferAggregator agg, ByteBuffer buff, int position)
   {
     agg.aggregate(buff, position);
     selector.increment();
diff --git a/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java
index cabc760315d..00e1921317c 100644
--- a/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.js.JavaScriptConfig;
 import io.druid.query.dimension.DimensionSpec;
 import io.druid.query.extraction.ExtractionFn;
@@ -223,9 +224,9 @@ public ColumnCapabilities getColumnCapabilities(String columnName)
 
   private void assertValues(FilteredAggregator agg, TestFloatColumnSelector selector, double... expectedVals)
   {
-    Assert.assertEquals(0.0d, agg.get());
-    Assert.assertEquals(0.0d, agg.get());
-    Assert.assertEquals(0.0d, agg.get());
+    Assert.assertEquals(NullHandling.defaultDoubleValue(), agg.get());
+    Assert.assertEquals(NullHandling.defaultDoubleValue(), agg.get());
+    Assert.assertEquals(NullHandling.defaultDoubleValue(), agg.get());
     for (double expectedVal : expectedVals) {
       aggregate(selector, agg);
       Assert.assertEquals(expectedVal, agg.get());
diff --git a/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java
index ab9b63670d7..7339528d7d3 100644
--- a/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java
@@ -114,7 +114,7 @@ public void testBufferAggregate()
     );
     HistogramBufferAggregator agg = new HistogramBufferAggregator(selector, breaks);
 
-    ByteBuffer buf = ByteBuffer.allocateDirect(factory.getMaxIntermediateSize());
+    ByteBuffer buf = ByteBuffer.allocateDirect(factory.getMaxIntermediateSizeWithNulls());
     int position = 0;
 
     agg.init(buf, position);
diff --git a/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java
index a95232f5780..59c42ceff6c 100644
--- a/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java
@@ -56,7 +56,7 @@ public void setup()
   @Test
   public void testLongMaxAggregator()
   {
-    LongMaxAggregator agg = (LongMaxAggregator) longMaxAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = longMaxAggFactory.factorize(colSelectorFactory);
 
     aggregate(selector, agg);
     aggregate(selector, agg);
@@ -71,9 +71,9 @@ public void testLongMaxAggregator()
   @Test
   public void testLongMaxBufferAggregator()
   {
-    LongMaxBufferAggregator agg = (LongMaxBufferAggregator) longMaxAggFactory.factorizeBuffered(colSelectorFactory);
+    BufferAggregator agg = longMaxAggFactory.factorizeBuffered(colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES + Byte.BYTES]);
     agg.init(buffer, 0);
 
     aggregate(selector, agg, buffer, 0);
@@ -105,13 +105,13 @@ public void testEqualsAndHashCode()
     Assert.assertFalse(one.equals(two));
   }
 
-  private void aggregate(TestLongColumnSelector selector, LongMaxAggregator agg)
+  private void aggregate(TestLongColumnSelector selector, Aggregator agg)
   {
     agg.aggregate();
     selector.increment();
   }
 
-  private void aggregate(TestLongColumnSelector selector, LongMaxBufferAggregator agg, ByteBuffer buff, int position)
+  private void aggregate(TestLongColumnSelector selector, BufferAggregator agg, ByteBuffer buff, int position)
   {
     agg.aggregate(buff, position);
     selector.increment();
diff --git a/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java
index 59c3d3d52e7..3c1443348a2 100644
--- a/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java
@@ -56,7 +56,7 @@ public void setup()
   @Test
   public void testLongMinAggregator()
   {
-    LongMinAggregator agg = (LongMinAggregator) longMinAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = longMinAggFactory.factorize(colSelectorFactory);
 
     aggregate(selector, agg);
     aggregate(selector, agg);
@@ -71,9 +71,9 @@ public void testLongMinAggregator()
   @Test
   public void testLongMinBufferAggregator()
   {
-    LongMinBufferAggregator agg = (LongMinBufferAggregator) longMinAggFactory.factorizeBuffered(colSelectorFactory);
+    BufferAggregator agg = longMinAggFactory.factorizeBuffered(colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES + Byte.BYTES]);
     agg.init(buffer, 0);
 
     aggregate(selector, agg, buffer, 0);
@@ -105,13 +105,13 @@ public void testEqualsAndHashCode()
     Assert.assertFalse(one.equals(two));
   }
 
-  private void aggregate(TestLongColumnSelector selector, LongMinAggregator agg)
+  private void aggregate(TestLongColumnSelector selector, Aggregator agg)
   {
     agg.aggregate();
     selector.increment();
   }
 
-  private void aggregate(TestLongColumnSelector selector, LongMinBufferAggregator agg, ByteBuffer buff, int position)
+  private void aggregate(TestLongColumnSelector selector, BufferAggregator agg, ByteBuffer buff, int position)
   {
     agg.aggregate(buff, position);
     selector.increment();
diff --git a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java
index d61bb588321..3e9dffc6919 100644
--- a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java
+++ b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java
@@ -105,7 +105,7 @@ protected void setUp()
         byRow
     );
 
-    int maxSize = factory.getMaxIntermediateSize();
+    int maxSize = factory.getMaxIntermediateSizeWithNulls();
     buf = ByteBuffer.allocate(maxSize + 64);
     pos = 10;
     buf.limit(pos + maxSize);
diff --git a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java
index 9d54852d5f2..338fda381e7 100644
--- a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java
@@ -27,6 +27,7 @@
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.js.JavaScriptConfig;
 import io.druid.query.ColumnSelectorPlus;
@@ -430,8 +431,8 @@ public void testAggregateValues()
     for (int i = 0; i < values1.size(); ++i) {
       aggregate(selectorList, agg);
     }
-    Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get()), 0.05);
-    Assert.assertEquals(7L, rowAggregatorFactoryRounded.finalizeComputation(agg.get()));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7.0 : 6.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get()), 0.05);
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7L : 6L, rowAggregatorFactoryRounded.finalizeComputation(agg.get()));
   }
 
   @Test
@@ -442,7 +443,7 @@ public void testBufferAggregateRows()
         true
     );
 
-    int maxSize = rowAggregatorFactory.getMaxIntermediateSize();
+    int maxSize = rowAggregatorFactory.getMaxIntermediateSizeWithNulls();
     ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);
     int pos = 10;
     buf.limit(pos + maxSize);
@@ -464,7 +465,7 @@ public void testBufferAggregateValues()
         false
     );
 
-    int maxSize = valueAggregatorFactory.getMaxIntermediateSize();
+    int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
     ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);
     int pos = 10;
     buf.limit(pos + maxSize);
@@ -474,8 +475,8 @@ public void testBufferAggregateValues()
     for (int i = 0; i < values1.size(); ++i) {
       bufferAggregate(selectorList, agg, buf, pos);
     }
-    Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)), 0.05);
-    Assert.assertEquals(7L, rowAggregatorFactoryRounded.finalizeComputation(agg.get(buf, pos)));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7.0 : 6.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)), 0.05);
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7L : 6L, rowAggregatorFactoryRounded.finalizeComputation(agg.get(buf, pos)));
   }
 
   @Test
@@ -554,11 +555,11 @@ public void testCombineValues()
       aggregate(selector2, agg2);
     }
 
-    Assert.assertEquals(4.0, (Double) valueAggregatorFactory.finalizeComputation(agg1.get()), 0.05);
-    Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg2.get()), 0.05);
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 4.0 : 3.0, (Double) valueAggregatorFactory.finalizeComputation(agg1.get()), 0.05);
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7.0 : 6.0, (Double) valueAggregatorFactory.finalizeComputation(agg2.get()), 0.05);
 
     Assert.assertEquals(
-        7.0,
+        NullHandling.replaceWithDefault() ? 7.0 : 6.0,
         (Double) rowAggregatorFactory.finalizeComputation(
             rowAggregatorFactory.combine(
                 agg1.get(),
diff --git a/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java
index b31f04dfe28..7fc70962fcb 100644
--- a/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestDoubleColumnSelectorImpl;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -71,7 +73,7 @@ public void setup()
   @Test
   public void testDoubleFirstAggregator()
   {
-    DoubleFirstAggregator agg = (DoubleFirstAggregator) doubleFirstAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = doubleFirstAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -89,10 +91,10 @@ public void testDoubleFirstAggregator()
   @Test
   public void testDoubleFirstBufferAggregator()
   {
-    DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) doubleFirstAggFactory.factorizeBuffered(
+    BufferAggregator agg = doubleFirstAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -119,7 +121,7 @@ public void testCombine()
   @Test
   public void testDoubleFirstCombiningAggregator()
   {
-    DoubleFirstAggregator agg = (DoubleFirstAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -138,10 +140,10 @@ public void testDoubleFirstCombiningAggregator()
   @Test
   public void testDoubleFirstCombiningBufferAggregator()
   {
-    DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -168,7 +170,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      DoubleFirstAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -178,7 +180,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      DoubleFirstBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/first/FloatFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/FloatFirstAggregationTest.java
index 91e94aacfe0..27ebeaf0514 100644
--- a/processing/src/test/java/io/druid/query/aggregation/first/FloatFirstAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/first/FloatFirstAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestFloatColumnSelector;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -71,7 +73,7 @@ public void setup()
   @Test
   public void testDoubleFirstAggregator()
   {
-    FloatFirstAggregator agg = (FloatFirstAggregator) floatFirstAggregatorFactory.factorize(colSelectorFactory);
+    Aggregator agg = floatFirstAggregatorFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -89,10 +91,10 @@ public void testDoubleFirstAggregator()
   @Test
   public void testDoubleFirstBufferAggregator()
   {
-    FloatFirstBufferAggregator agg = (FloatFirstBufferAggregator) floatFirstAggregatorFactory.factorizeBuffered(
+    BufferAggregator agg = floatFirstAggregatorFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[floatFirstAggregatorFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[floatFirstAggregatorFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -119,7 +121,7 @@ public void testCombine()
   @Test
   public void testDoubleFirstCombiningAggregator()
   {
-    FloatFirstAggregator agg = (FloatFirstAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -138,10 +140,10 @@ public void testDoubleFirstCombiningAggregator()
   @Test
   public void testDoubleFirstCombiningBufferAggregator()
   {
-    FloatFirstBufferAggregator agg = (FloatFirstBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[floatFirstAggregatorFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[floatFirstAggregatorFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -168,7 +170,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      FloatFirstAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -178,7 +180,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      FloatFirstBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java
index 25fdb3106fd..c2c81b14c87 100644
--- a/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
 import io.druid.segment.ColumnSelectorFactory;
@@ -70,7 +72,7 @@ public void setup()
   @Test
   public void testLongFirstAggregator()
   {
-    LongFirstAggregator agg = (LongFirstAggregator) longFirstAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = longFirstAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -88,10 +90,10 @@ public void testLongFirstAggregator()
   @Test
   public void testLongFirstBufferAggregator()
   {
-    LongFirstBufferAggregator agg = (LongFirstBufferAggregator) longFirstAggFactory.factorizeBuffered(
+    BufferAggregator agg = longFirstAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -118,7 +120,7 @@ public void testCombine()
   @Test
   public void testLongFirstCombiningAggregator()
   {
-    LongFirstAggregator agg = (LongFirstAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -137,10 +139,10 @@ public void testLongFirstCombiningAggregator()
   @Test
   public void testLongFirstCombiningBufferAggregator()
   {
-    LongFirstBufferAggregator agg = (LongFirstBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -167,7 +169,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      LongFirstAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -177,7 +179,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      LongFirstBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/first/StringFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/StringFirstAggregationTest.java
index 8f523c02ee5..0c07ffa06da 100644
--- a/processing/src/test/java/io/druid/query/aggregation/first/StringFirstAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/first/StringFirstAggregationTest.java
@@ -20,6 +20,10 @@
 package io.druid.query.aggregation.first;
 
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.AggregateCombiner;
+import io.druid.query.aggregation.Aggregator;
+import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.SerializablePairLongString;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -35,8 +39,8 @@
 public class StringFirstAggregationTest
 {
   private final Integer MAX_STRING_SIZE = 1024;
-  private StringFirstAggregatorFactory stringLastAggFactory;
-  private StringFirstAggregatorFactory combiningAggFactory;
+  private AggregatorFactory stringLastAggFactory;
+  private AggregatorFactory combiningAggFactory;
   private ColumnSelectorFactory colSelectorFactory;
   private TestLongColumnSelector timeSelector;
   private TestObjectColumnSelector<String> valueSelector;
@@ -56,7 +60,7 @@
   public void setup()
   {
     stringLastAggFactory = new StringFirstAggregatorFactory("billy", "nilly", MAX_STRING_SIZE);
-    combiningAggFactory = (StringFirstAggregatorFactory) stringLastAggFactory.getCombiningFactory();
+    combiningAggFactory = stringLastAggFactory.getCombiningFactory();
     timeSelector = new TestLongColumnSelector(times);
     valueSelector = new TestObjectColumnSelector<>(strings);
     objectSelector = new TestObjectColumnSelector<>(pairs);
@@ -70,7 +74,7 @@ public void setup()
   @Test
   public void testStringLastAggregator()
   {
-    StringFirstAggregator agg = (StringFirstAggregator) stringLastAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = stringLastAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -85,7 +89,7 @@ public void testStringLastAggregator()
   @Test
   public void testStringLastBufferAggregator()
   {
-    StringFirstBufferAggregator agg = (StringFirstBufferAggregator) stringLastAggFactory.factorizeBuffered(
+    BufferAggregator agg = stringLastAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[stringLastAggFactory.getMaxIntermediateSize()]);
@@ -112,7 +116,7 @@ public void testCombine()
   @Test
   public void testStringLastCombiningAggregator()
   {
-    StringFirstAggregator agg = (StringFirstAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -129,7 +133,7 @@ public void testStringLastCombiningAggregator()
   @Test
   public void testStringFirstCombiningBufferAggregator()
   {
-    StringFirstBufferAggregator agg = (StringFirstBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[stringLastAggFactory.getMaxIntermediateSize()]);
@@ -153,8 +157,8 @@ public void testStringFirstAggregateCombiner()
     final String[] strings = {"AAAA", "BBBB", "CCCC", "DDDD", "EEEE"};
     TestObjectColumnSelector columnSelector = new TestObjectColumnSelector<>(strings);
 
-    StringFirstAggregateCombiner stringFirstAggregateCombiner =
-        (StringFirstAggregateCombiner) combiningAggFactory.makeAggregateCombiner();
+    AggregateCombiner stringFirstAggregateCombiner =
+        combiningAggFactory.makeAggregateCombiner();
 
     stringFirstAggregateCombiner.reset(columnSelector);
 
@@ -171,7 +175,7 @@ public void testStringFirstAggregateCombiner()
   }
 
   private void aggregate(
-      StringFirstAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -181,7 +185,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      StringFirstBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java b/processing/src/test/java/io/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java
index bac9a6dfcbc..8a0d9ea8772 100644
--- a/processing/src/test/java/io/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java
@@ -99,7 +99,7 @@ public void testTopNWithDistinctCountAgg() throws Exception
                                   .granularity(QueryRunnerTestHelper.allGran)
                                   .intervals(QueryRunnerTestHelper.fullOnInterval)
                                   .aggregators(
-                                      Lists.newArrayList(
+                                      Collections.singletonList(
                                           new StringFirstAggregatorFactory(
                                               "last_client_type", client_type, 1024
                                           )
diff --git a/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java
index acc886c2b91..c44f476e4e7 100644
--- a/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestDoubleColumnSelectorImpl;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -71,7 +73,7 @@ public void setup()
   @Test
   public void testDoubleLastAggregator()
   {
-    DoubleLastAggregator agg = (DoubleLastAggregator) doubleLastAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = doubleLastAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -89,10 +91,10 @@ public void testDoubleLastAggregator()
   @Test
   public void testDoubleLastBufferAggregator()
   {
-    DoubleLastBufferAggregator agg = (DoubleLastBufferAggregator) doubleLastAggFactory.factorizeBuffered(
+    BufferAggregator agg = doubleLastAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -119,7 +121,7 @@ public void testCombine()
   @Test
   public void testDoubleLastCombiningAggregator()
   {
-    DoubleLastAggregator agg = (DoubleLastAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -138,10 +140,10 @@ public void testDoubleLastCombiningAggregator()
   @Test
   public void testDoubleLastCombiningBufferAggregator()
   {
-    DoubleLastBufferAggregator agg = (DoubleLastBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -168,7 +170,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      DoubleLastAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -178,7 +180,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      DoubleLastBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/last/FloatLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/FloatLastAggregationTest.java
index e4f60942e58..dce99494652 100644
--- a/processing/src/test/java/io/druid/query/aggregation/last/FloatLastAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/last/FloatLastAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestFloatColumnSelector;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -71,7 +73,7 @@ public void setup()
   @Test
   public void testDoubleLastAggregator()
   {
-    FloatLastAggregator agg = (FloatLastAggregator) floatLastAggregatorFactory.factorize(colSelectorFactory);
+    Aggregator agg = floatLastAggregatorFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -89,10 +91,10 @@ public void testDoubleLastAggregator()
   @Test
   public void testDoubleLastBufferAggregator()
   {
-    FloatLastBufferAggregator agg = (FloatLastBufferAggregator) floatLastAggregatorFactory.factorizeBuffered(
+    BufferAggregator agg = floatLastAggregatorFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[floatLastAggregatorFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[floatLastAggregatorFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -119,7 +121,7 @@ public void testCombine()
   @Test
   public void testDoubleLastCombiningAggregator()
   {
-    FloatLastAggregator agg = (FloatLastAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -138,10 +140,10 @@ public void testDoubleLastCombiningAggregator()
   @Test
   public void testDoubleLastCombiningBufferAggregator()
   {
-    FloatLastBufferAggregator agg = (FloatLastBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[floatLastAggregatorFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[floatLastAggregatorFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -168,7 +170,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      FloatLastAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -178,7 +180,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      FloatLastBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java
index 77b0d00ad02..d888247015f 100644
--- a/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
 import io.druid.segment.ColumnSelectorFactory;
@@ -70,7 +72,7 @@ public void setup()
   @Test
   public void testLongLastAggregator()
   {
-    LongLastAggregator agg = (LongLastAggregator) longLastAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = longLastAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -88,10 +90,10 @@ public void testLongLastAggregator()
   @Test
   public void testLongLastBufferAggregator()
   {
-    LongLastBufferAggregator agg = (LongLastBufferAggregator) longLastAggFactory.factorizeBuffered(
+    BufferAggregator agg = longLastAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -118,7 +120,7 @@ public void testCombine()
   @Test
   public void testLongLastCombiningAggregator()
   {
-    LongLastAggregator agg = (LongLastAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -137,10 +139,10 @@ public void testLongLastCombiningAggregator()
   @Test
   public void testLongLastCombiningBufferAggregator()
   {
-    LongLastBufferAggregator agg = (LongLastBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSize()]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSizeWithNulls()]);
     agg.init(buffer, 0);
 
     aggregate(agg, buffer, 0);
@@ -167,7 +169,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      LongLastAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -177,7 +179,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      LongLastBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/last/StringLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/StringLastAggregationTest.java
index 1f2ecc48152..817469ab948 100644
--- a/processing/src/test/java/io/druid/query/aggregation/last/StringLastAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/last/StringLastAggregationTest.java
@@ -20,6 +20,10 @@
 package io.druid.query.aggregation.last;
 
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.AggregateCombiner;
+import io.druid.query.aggregation.Aggregator;
+import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.SerializablePairLongString;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -35,8 +39,8 @@
 public class StringLastAggregationTest
 {
   private final Integer MAX_STRING_SIZE = 1024;
-  private StringLastAggregatorFactory stringLastAggFactory;
-  private StringLastAggregatorFactory combiningAggFactory;
+  private AggregatorFactory stringLastAggFactory;
+  private AggregatorFactory combiningAggFactory;
   private ColumnSelectorFactory colSelectorFactory;
   private TestLongColumnSelector timeSelector;
   private TestObjectColumnSelector<String> valueSelector;
@@ -56,7 +60,7 @@
   public void setup()
   {
     stringLastAggFactory = new StringLastAggregatorFactory("billy", "nilly", MAX_STRING_SIZE);
-    combiningAggFactory = (StringLastAggregatorFactory) stringLastAggFactory.getCombiningFactory();
+    combiningAggFactory = stringLastAggFactory.getCombiningFactory();
     timeSelector = new TestLongColumnSelector(times);
     valueSelector = new TestObjectColumnSelector<>(strings);
     objectSelector = new TestObjectColumnSelector<>(pairs);
@@ -70,7 +74,7 @@ public void setup()
   @Test
   public void testStringLastAggregator()
   {
-    StringLastAggregator agg = (StringLastAggregator) stringLastAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = stringLastAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -85,7 +89,7 @@ public void testStringLastAggregator()
   @Test
   public void testStringLastBufferAggregator()
   {
-    StringLastBufferAggregator agg = (StringLastBufferAggregator) stringLastAggFactory.factorizeBuffered(
+    BufferAggregator agg = stringLastAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[stringLastAggFactory.getMaxIntermediateSize()]);
@@ -112,7 +116,7 @@ public void testCombine()
   @Test
   public void testStringLastCombiningAggregator()
   {
-    StringLastAggregator agg = (StringLastAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -120,7 +124,7 @@ public void testStringLastCombiningAggregator()
     aggregate(agg);
 
     Pair<Long, String> result = (Pair<Long, String>) agg.get();
-    Pair<Long, String> expected = (Pair<Long, String>) pairs[2];
+    Pair<Long, String> expected = pairs[2];
 
     Assert.assertEquals(expected.lhs, result.lhs);
     Assert.assertEquals(expected.rhs, result.rhs);
@@ -129,7 +133,7 @@ public void testStringLastCombiningAggregator()
   @Test
   public void testStringLastCombiningBufferAggregator()
   {
-    StringLastBufferAggregator agg = (StringLastBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[stringLastAggFactory.getMaxIntermediateSize()]);
@@ -141,7 +145,7 @@ public void testStringLastCombiningBufferAggregator()
     aggregate(agg, buffer, 0);
 
     Pair<Long, String> result = (Pair<Long, String>) agg.get(buffer, 0);
-    Pair<Long, String> expected = (Pair<Long, String>) pairs[2];
+    Pair<Long, String> expected = pairs[2];
 
     Assert.assertEquals(expected.lhs, result.lhs);
     Assert.assertEquals(expected.rhs, result.rhs);
@@ -153,8 +157,7 @@ public void testStringLastAggregateCombiner()
     final String[] strings = {"AAAA", "BBBB", "CCCC", "DDDD", "EEEE"};
     TestObjectColumnSelector columnSelector = new TestObjectColumnSelector<>(strings);
 
-    StringLastAggregateCombiner stringFirstAggregateCombiner =
-        (StringLastAggregateCombiner) combiningAggFactory.makeAggregateCombiner();
+    AggregateCombiner stringFirstAggregateCombiner = combiningAggFactory.makeAggregateCombiner();
 
     stringFirstAggregateCombiner.reset(columnSelector);
 
@@ -171,7 +174,7 @@ public void testStringLastAggregateCombiner()
   }
 
   private void aggregate(
-      StringLastAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -181,7 +184,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      StringLastBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/extraction/FunctionalExtractionTest.java b/processing/src/test/java/io/druid/query/extraction/FunctionalExtractionTest.java
index aaeec734e43..2073ddd8a3f 100644
--- a/processing/src/test/java/io/druid/query/extraction/FunctionalExtractionTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/FunctionalExtractionTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
+import io.druid.common.config.NullHandling;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -135,7 +136,7 @@ public void testRetainMissing()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? in : out, exFn.apply(in));
+    Assert.assertEquals(NullHandling.isNullOrEquivalent(out) ? in : out, exFn.apply(in));
   }
 
   @Test
@@ -149,7 +150,7 @@ public void testRetainMissingButFound()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? in : out, exFn.apply(in));
+    Assert.assertEquals(NullHandling.isNullOrEquivalent(out) ? in : out, exFn.apply(in));
   }
 
   @Test
@@ -163,7 +164,11 @@ public void testReplaceMissing()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? MISSING : out, exFn.apply(in));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(NullHandling.isNullOrEquivalent(out) ? MISSING : out, exFn.apply(in));
+    } else {
+      Assert.assertEquals(out == null ? MISSING : out, exFn.apply(in));
+    }
   }
 
 
@@ -178,7 +183,11 @@ public void testReplaceMissingBlank()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? null : out, exFn.apply(in));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(Strings.isNullOrEmpty(out) ? null : out, exFn.apply(in));
+    } else {
+      Assert.assertEquals(out == null ? "" : out, exFn.apply(in));
+    }
   }
 
   @Test
@@ -192,7 +201,11 @@ public void testOnlyOneValuePresent()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? null : out, exFn.apply(in));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(Strings.isNullOrEmpty(out) ? null : out, exFn.apply(in));
+    } else {
+      Assert.assertEquals(Strings.isNullOrEmpty(out) ? "" : out, exFn.apply(in));
+    }
   }
 
   @Test
@@ -204,7 +217,7 @@ public void testNullInputs()
         null,
         false
     );
-    if (Strings.isNullOrEmpty(fn.apply(null))) {
+    if (NullHandling.isNullOrEquivalent(fn.apply(null))) {
       Assert.assertEquals(null, exFn.apply(null));
     }
   }
diff --git a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java
index 796c5732a46..5450522e0c7 100644
--- a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java
@@ -23,6 +23,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.DateTimes;
 import io.druid.js.JavaScriptConfig;
@@ -131,7 +132,11 @@ public void testJavascriptIsNull()
 
     Assert.assertEquals("yes", extractionFn.apply((String) null));
     Assert.assertEquals("yes", extractionFn.apply((Object) null));
-    Assert.assertEquals("yes", extractionFn.apply(""));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals("yes", extractionFn.apply(""));
+    } else {
+      Assert.assertEquals("no", extractionFn.apply(""));
+    }
     Assert.assertEquals("no", extractionFn.apply("abc"));
     Assert.assertEquals("no", extractionFn.apply(new Object()));
     Assert.assertEquals("no", extractionFn.apply(1));
diff --git a/processing/src/test/java/io/druid/query/extraction/LowerExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/LowerExtractionFnTest.java
index 803b416f0f4..cf3af19dd0d 100644
--- a/processing/src/test/java/io/druid/query/extraction/LowerExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/LowerExtractionFnTest.java
@@ -19,6 +19,7 @@
 
 package io.druid.query.extraction;
 
+import io.druid.common.config.NullHandling;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -31,7 +32,7 @@
   public void testApply()
   {
     Assert.assertEquals("lower 1 string", extractionFn.apply("lOwER 1 String"));
-    Assert.assertEquals(null, extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
     Assert.assertEquals(null, extractionFn.apply(null));
     Assert.assertEquals(null, extractionFn.apply((Object) null));
     Assert.assertEquals("1", extractionFn.apply(1));
diff --git a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java
index b3aedea1301..751eafec3ee 100644
--- a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -39,9 +40,19 @@ public void testUnApply()
   {
     Assert.assertEquals(Collections.singletonList("foo"), fn.unapply("bar"));
     Assert.assertEquals(Sets.newHashSet("null", "empty String"), Sets.newHashSet(fn.unapply("")));
-    Assert.assertEquals("Null value should be equal to empty string",
-                        Sets.newHashSet("null", "empty String"),
-                        Sets.newHashSet(fn.unapply((String) null)));
+    if (NullHandling.sqlCompatible()) {
+      Assert.assertEquals(
+          "Null value should be equal to empty list",
+          Sets.newHashSet(),
+          Sets.newHashSet(fn.unapply((String) null))
+      );
+    } else {
+      Assert.assertEquals(
+          "Null value should be equal to empty string",
+          Sets.newHashSet("null", "empty String"),
+          Sets.newHashSet(fn.unapply((String) null))
+      );
+    }
     Assert.assertEquals(Sets.newHashSet(""), Sets.newHashSet(fn.unapply("empty_string")));
     Assert.assertEquals("not existing value returns empty list", Collections.EMPTY_LIST, fn.unapply("not There"));
   }
@@ -55,7 +66,6 @@ public void testGetMap()
   @Test
   public void testApply()
   {
-
     Assert.assertEquals("bar", fn.apply("foo"));
   }
 
diff --git a/processing/src/test/java/io/druid/query/extraction/MatchingDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/MatchingDimExtractionFnTest.java
index 8649d843fb6..a354e44d545 100644
--- a/processing/src/test/java/io/druid/query/extraction/MatchingDimExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/MatchingDimExtractionFnTest.java
@@ -21,6 +21,7 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -75,7 +76,7 @@ public void testNullExtraction()
 
     Assert.assertNull(extractionFn.apply((Object) null));
     Assert.assertNull(extractionFn.apply((String) null));
-    Assert.assertNull(extractionFn.apply((String) ""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply((String) ""));
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/query/extraction/RegexDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/RegexDimExtractionFnTest.java
index f9815578294..f499c3f0fe9 100644
--- a/processing/src/test/java/io/druid/query/extraction/RegexDimExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/RegexDimExtractionFnTest.java
@@ -22,6 +22,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -145,11 +146,11 @@ public void testNullAndEmpty()
     String regex = "(.*)/.*/.*";
     ExtractionFn extractionFn = new RegexDimExtractionFn(regex, false, null);
     // no match, map empty input value to null
-    Assert.assertEquals(null, extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
     // null value, returns null
     Assert.assertEquals(null, extractionFn.apply(null));
     // empty match, map empty result to null
-    Assert.assertEquals(null, extractionFn.apply("/a/b"));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply("/a/b"));
   }
 
   @Test
@@ -168,8 +169,8 @@ public void testMissingValueReplacementWhenPatternMatchesNull()
   {
     String regex = "^()$";
     ExtractionFn extractionFn = new RegexDimExtractionFn(regex, true, "NO MATCH");
-    Assert.assertEquals(null, extractionFn.apply(""));
-    Assert.assertEquals(null, extractionFn.apply(null));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "NO MATCH", extractionFn.apply(null));
     Assert.assertEquals("NO MATCH", extractionFn.apply("abc"));
   }
 
@@ -178,10 +179,10 @@ public void testMissingValueReplacementToEmpty()
   {
     String regex = "(bob)";
     ExtractionFn extractionFn = new RegexDimExtractionFn(regex, true, "");
-    Assert.assertEquals(null, extractionFn.apply(null));
-    Assert.assertEquals(null, extractionFn.apply(""));
-    Assert.assertEquals(null, extractionFn.apply("abc"));
-    Assert.assertEquals(null, extractionFn.apply("123"));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(null));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply("abc"));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply("123"));
     Assert.assertEquals("bob", extractionFn.apply("bobby"));
   }
 
diff --git a/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java
index f7e51a35ca6..189194098b4 100644
--- a/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java
@@ -21,6 +21,7 @@
 
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -55,7 +56,10 @@ public void testApplyNull2()
   {
     String test = null;
     Assert.assertEquals("null", format("%s", "nullString").apply(test));
-    Assert.assertNull(format("%s", "emptyString").apply(test));
+    Assert.assertEquals(
+        NullHandling.emptyToNullIfNeeded(""),
+        format("%s", "emptyString").apply(test)
+    );
     Assert.assertNull(format("%s", "returnNull").apply(test));
   }
 
diff --git a/processing/src/test/java/io/druid/query/extraction/StrlenExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/StrlenExtractionFnTest.java
index c4f44e19390..17b754a62a0 100644
--- a/processing/src/test/java/io/druid/query/extraction/StrlenExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/StrlenExtractionFnTest.java
@@ -20,6 +20,7 @@
 package io.druid.query.extraction;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -29,7 +30,7 @@
   @Test
   public void testApply()
   {
-    Assert.assertEquals("0", StrlenExtractionFn.instance().apply(null));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? "0" : null, StrlenExtractionFn.instance().apply(null));
     Assert.assertEquals("0", StrlenExtractionFn.instance().apply(""));
     Assert.assertEquals("1", StrlenExtractionFn.instance().apply("x"));
     Assert.assertEquals("3", StrlenExtractionFn.instance().apply("foo"));
diff --git a/processing/src/test/java/io/druid/query/extraction/TimeDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/TimeDimExtractionFnTest.java
index c7fc9547fcc..2b7f5fe7af9 100644
--- a/processing/src/test/java/io/druid/query/extraction/TimeDimExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/TimeDimExtractionFnTest.java
@@ -21,6 +21,7 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -48,7 +49,11 @@ public void testEmptyNullAndUnparseableExtraction()
       ExtractionFn extractionFn = new TimeDimExtractionFn("MM/dd/yyyy", "MM/yyyy", joda);
 
       Assert.assertNull(extractionFn.apply(null));
-      Assert.assertNull(extractionFn.apply(""));
+      if (NullHandling.replaceWithDefault()) {
+        Assert.assertNull(extractionFn.apply(""));
+      } else {
+        Assert.assertEquals("", extractionFn.apply(""));
+      }
       Assert.assertEquals("foo", extractionFn.apply("foo"));
     }
   }
diff --git a/processing/src/test/java/io/druid/query/extraction/UpperExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/UpperExtractionFnTest.java
index 8ba95f76c1e..55d8f5255a3 100644
--- a/processing/src/test/java/io/druid/query/extraction/UpperExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/UpperExtractionFnTest.java
@@ -19,6 +19,7 @@
 
 package io.druid.query.extraction;
 
+import io.druid.common.config.NullHandling;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -31,7 +32,7 @@
   public void testApply()
   {
     Assert.assertEquals("UPPER", extractionFn.apply("uPpeR"));
-    Assert.assertEquals(null, extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
     Assert.assertEquals(null, extractionFn.apply(null));
     Assert.assertEquals(null, extractionFn.apply((Object) null));
     Assert.assertEquals("1", extractionFn.apply(1));
diff --git a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java
index 26aa033bc35..e9e9c238575 100644
--- a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java
+++ b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableRangeSet;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.Intervals;
 import io.druid.js.JavaScriptConfig;
 import io.druid.query.extraction.IdentityExtractionFn;
@@ -60,7 +61,8 @@
   );
   private final DimFilter other1 = new RegexDimFilter("someDim", "pattern", null);
   private final DimFilter other2 = new JavaScriptDimFilter("someOtherDim", "function(x) { return x }", null,
-                                                           JavaScriptConfig.getEnabledInstance());
+                                                           JavaScriptConfig.getEnabledInstance()
+  );
   private final DimFilter other3 = new SearchQueryDimFilter("dim", new ContainsSearchQuerySpec("a", true), null);
 
   private final DimFilter interval1 = new IntervalDimFilter(
@@ -91,13 +93,13 @@ public void testSimpleFilter()
     Assert.assertEquals(expected1, selector1.getDimensionRangeSet("dim1"));
     Assert.assertNull(selector1.getDimensionRangeSet("dim2"));
 
-    RangeSet expected2 = rangeSet(point(""));
+    RangeSet expected2 = rangeSet(point(null));
     Assert.assertEquals(expected2, selector5.getDimensionRangeSet("dim1"));
 
     RangeSet expected3 = rangeSet(ImmutableList.of(point("testing"), point("this"), point("filter"), point("tillend")));
     Assert.assertEquals(expected3, in1.getDimensionRangeSet("dim1"));
 
-    RangeSet expected4 = rangeSet(ImmutableList.of(point("null"), point("")));
+    RangeSet expected4 = rangeSet(ImmutableList.of(point("null"), point(null)));
     Assert.assertEquals(expected4, in3.getDimensionRangeSet("dim1"));
 
     RangeSet expected5 = ImmutableRangeSet.of(Range.closed("from", "to"));
@@ -146,12 +148,13 @@ public void testAndFilter()
   public void testOrFilter()
   {
     DimFilter or1 = new OrDimFilter(ImmutableList.of(selector1, selector2, selector5));
-    RangeSet expected1 = rangeSet(ImmutableList.of(point(""), point("a"), point("z")));
+    RangeSet expected1 = rangeSet(ImmutableList.of(point(null), point("a"), point("z")));
     Assert.assertEquals(expected1, or1.getDimensionRangeSet("dim1"));
 
     DimFilter or2 = new OrDimFilter(ImmutableList.of(selector5, in1, in3));
     RangeSet expected2 = rangeSet(ImmutableList.of(point("testing"), point("this"), point("filter"), point("tillend"),
-                                                   point("null"), point("")));
+                                                   point("null"), point(null)
+    ));
     Assert.assertEquals(expected2, or2.getDimensionRangeSet("dim1"));
 
     DimFilter or3 = new OrDimFilter(ImmutableList.of(bound1, bound2, bound3));
@@ -162,11 +165,13 @@ public void testOrFilter()
     Assert.assertNull(or4.getDimensionRangeSet("dim2"));
 
     DimFilter or5 = new OrDimFilter(ImmutableList.of(or1, or2, bound1));
-    RangeSet expected5 = rangeSet(ImmutableList.of(point(""), point("a"), point("filter"), Range.closed("from", "to"),
-                                                   point("z")));
+    RangeSet expected5 = rangeSet(ImmutableList.of(point(null), point("a"), point("filter"), Range.closed("from", "to"),
+                                                   point("z")
+    ));
     Assert.assertEquals(expected5, or5.getDimensionRangeSet("dim1"));
   }
 
+
   @Test
   public void testNotFilter()
   {
@@ -176,15 +181,28 @@ public void testNotFilter()
     Assert.assertNull(not1.getDimensionRangeSet("dim2"));
 
     DimFilter not2 = new NotDimFilter(in3);
-    RangeSet expected2 = rangeSet(ImmutableList.of(Range.lessThan(""), Range.open("", "null"), Range.greaterThan("null")));
-    Assert.assertEquals(expected2, not2.getDimensionRangeSet("dim1"));
+    if (NullHandling.sqlCompatible()) {
+      // Empty string is included when != null for SQL Compatible case
+      RangeSet expected2 = rangeSet(ImmutableList.of(
+          Range.closedOpen("", "null"),
+          Range.greaterThan("null")
+      ));
+      Assert.assertEquals(expected2, not2.getDimensionRangeSet("dim1"));
+    } else {
+      RangeSet expected2 = rangeSet(ImmutableList.of(
+          Range.lessThan(""),
+          Range.open("", "null"),
+          Range.greaterThan("null")
+      ));
+      Assert.assertEquals(expected2, not2.getDimensionRangeSet("dim1"));
+    }
 
     DimFilter not3 = new NotDimFilter(bound1);
     RangeSet expected3 = rangeSet(ImmutableList.of(Range.lessThan("from"), Range.greaterThan("to")));
     Assert.assertEquals(expected3, not3.getDimensionRangeSet("dim1"));
 
     DimFilter not4 = new NotDimFilter(not2);
-    RangeSet expected4 = rangeSet(ImmutableList.of(point(""), point("null")));
+    RangeSet expected4 = rangeSet(ImmutableList.of(point(null), point("null")));
     Assert.assertEquals(expected4, not4.getDimensionRangeSet("dim1"));
 
     DimFilter or1 = new OrDimFilter(ImmutableList.of(selector1, selector2, bound1, bound3));
@@ -203,7 +221,8 @@ public void testNotFilter()
     DimFilter and1 = new AndDimFilter(ImmutableList.of(in1, bound1, bound2));
     DimFilter not7 = new NotDimFilter(and1);
     RangeSet expected7 = rangeSet(ImmutableList.of(Range.lessThan("testing"), Range.open("testing", "this"),
-                                                   Range.open("this", "tillend"), Range.greaterThan("tillend")));
+                                                   Range.open("this", "tillend"), Range.greaterThan("tillend")
+    ));
     Assert.assertEquals(expected7, not7.getDimensionRangeSet("dim1"));
     Assert.assertNull(not7.getDimensionRangeSet("dim2"));
 
@@ -216,6 +235,15 @@ public void testNotFilter()
 
   private static Range<String> point(String s)
   {
+    if (s == null) {
+      if (NullHandling.sqlCompatible()) {
+        // Range.singleton(null) is invalid
+        return Range.lessThan("");
+      } else {
+        // For non-sql compatible case, null and "" are equivalent
+        return Range.singleton("");
+      }
+    }
     return Range.singleton(s);
   }
 
diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
index 882cd48f28b..01e67bb370c 100644
--- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
@@ -35,6 +35,7 @@
 import io.druid.collections.DefaultBlockingPool;
 import io.druid.collections.NonBlockingPool;
 import io.druid.collections.StupidPool;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Row;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.IAE;
@@ -6314,12 +6315,23 @@ public void testGroupByWithExtractionDimFilterCaseMappingValueIsNullOrEmpty()
                                      .setGranularity(QueryRunnerTestHelper.dayGran)
                                      .setDimFilter(new ExtractionDimFilter("quality", "", lookupExtractionFn, null))
                                      .build();
-    List<Row> expectedResults = Arrays.asList(
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 2870L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 2447L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L)
-    );
+
+    List<Row> expectedResults;
+
+    if (NullHandling.replaceWithDefault()) {
+      expectedResults = Arrays.asList(
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 2870L),
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 2447L),
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L)
+      );
+    } else {
+      // Only empty string should match, nulls will not match
+      expectedResults = Arrays.asList(
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L)
+      );
+    }
 
     Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
     TestHelper.assertExpectedObjects(expectedResults, results, "");
@@ -6354,10 +6366,17 @@ public void testGroupByWithExtractionDimFilterWhenSearchValueNotInTheMap()
   public void testGroupByWithExtractionDimFilterKeyisNull()
   {
     Map<String, String> extractionMap = new HashMap<>();
-    extractionMap.put("", "NULLorEMPTY");
+
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
+    LookupExtractionFn lookupExtractionFn;
+    if (NullHandling.replaceWithDefault()) {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
+      extractionMap.put("", "REPLACED_VALUE");
+    } else {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "REPLACED_VALUE", true, false);
+      extractionMap.put("", "NOT_USED");
+    }
 
     GroupByQuery query = GroupByQuery.builder()
                                      .setDataSource(QueryRunnerTestHelper.dataSource)
@@ -6367,9 +6386,14 @@ public void testGroupByWithExtractionDimFilterKeyisNull()
                                                          new LongSumAggregatorFactory("idx", "index"))
                                      .setGranularity(QueryRunnerTestHelper.dayGran)
                                      .setDimFilter(
-                                         new ExtractionDimFilter("null_column", "NULLorEMPTY", lookupExtractionFn, null)
-                                     )
-                                     .build();
+                                         new ExtractionDimFilter(
+                                             "null_column",
+                                             "REPLACED_VALUE",
+                                             lookupExtractionFn,
+                                             null
+                                         )
+                                     ).build();
+
     List<Row> expectedResults = Arrays
         .asList(
             GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", null, "rows", 13L, "idx", 6619L),
@@ -6411,25 +6435,137 @@ public void testGroupByWithAggregatorFilterAndExtractionFunction()
                                      .setGranularity(QueryRunnerTestHelper.dayGran)
                                      .build();
     List<Row> expectedResults = Arrays.asList(
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "health", "rows", 0L, "idx", 0L),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "automotive",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "business",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "entertainment",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "health",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 2870L),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "technology", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 0L, "idx", 0L),
-
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "automotive", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "business", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "health", "rows", 0L, "idx", 0L),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "premium",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "technology",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "travel",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "automotive",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "business",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "entertainment",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "health",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 2447L),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "premium", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "technology", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "rows", 0L, "idx", 0L)
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "premium",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "technology",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "travel",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        )
     );
 
     Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
@@ -6481,7 +6617,14 @@ public void testGroupByWithExtractionDimFilterNullDims()
     extractionMap.put("", "EMPTY");
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+    LookupExtractionFn lookupExtractionFn;
+    if (NullHandling.replaceWithDefault()) {
+      extractionMap.put("", "EMPTY"); // NOTE(review): redundant — this mapping is already added above; kept only for symmetry with the else branch
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+    } else {
+      extractionMap.put("", "SHOULD_NOT_BE_USED");
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "EMPTY", true, true);
+    }
 
     GroupByQuery query = GroupByQuery.builder()
                                      .setDataSource(QueryRunnerTestHelper.dataSource)
@@ -6600,7 +6743,7 @@ public void testGroupByWithAllFiltersOnNullDimsWithExtractionFns()
     extractionMap.put(null, "EMPTY");
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn extractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+    LookupExtractionFn extractionFn = new LookupExtractionFn(mapLookupExtractor, false, "EMPTY", true, true);
     String jsFn = "function(x) { return(x === 'EMPTY') }";
 
     List<DimFilter> superFilterList = new ArrayList<>();
@@ -7362,14 +7505,16 @@ public void testGroupByNumericStringsAsNumericWithDecoration()
         .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null))
         .setAggregatorSpecs(new CountAggregatorFactory("count"))
         .setGranularity(QueryRunnerTestHelper.allGran)
+        .addOrderByColumn("ql")
         .build();
 
+    List<Row> expectedResults;
     // "entertainment" rows are excluded by the decorated specs, they become empty rows
-    List<Row> expectedResults = Arrays.asList(
+    expectedResults = Arrays.asList(
         GroupByQueryRunnerTestHelper.createExpectedRow(
             "2011-04-01",
-            "ql", 0L,
-            "qf", 0.0,
+            "ql", NullHandling.defaultLongValue(),
+            "qf", NullHandling.defaultDoubleValue(),
             "count", 2L
         ),
         GroupByQueryRunnerTestHelper.createExpectedRow(
@@ -7412,21 +7558,38 @@ public void testGroupByDecorationOnNumerics()
         .setAggregatorSpecs(new CountAggregatorFactory("count"))
         .setGranularity(QueryRunnerTestHelper.allGran)
         .build();
-
-    List<Row> expectedResults = Arrays.asList(
-        GroupByQueryRunnerTestHelper.createExpectedRow(
-            "2011-04-01",
-            "ql", 0L,
-            "qf", 0.0,
-            "count", 2L
-        ),
-        GroupByQueryRunnerTestHelper.createExpectedRow(
-            "2011-04-01",
-            "ql", 1700L,
-            "qf", 17000.0,
-            "count", 2L
-        )
-    );
+    List<Row> expectedResults;
+    if (NullHandling.replaceWithDefault()) {
+      expectedResults = Arrays.asList(
+          GroupByQueryRunnerTestHelper.createExpectedRow(
+              "2011-04-01",
+              "ql", 0L,
+              "qf", 0.0,
+              "count", 2L
+          ),
+          GroupByQueryRunnerTestHelper.createExpectedRow(
+              "2011-04-01",
+              "ql", 1700L,
+              "qf", 17000.0,
+              "count", 2L
+          )
+      );
+    } else {
+      expectedResults = Arrays.asList(
+          GroupByQueryRunnerTestHelper.createExpectedRow(
+              "2011-04-01",
+              "ql", null,
+              "qf", null,
+              "count", 2L
+          ),
+          GroupByQueryRunnerTestHelper.createExpectedRow(
+              "2011-04-01",
+              "ql", 1700L,
+              "qf", 17000.0,
+              "count", 2L
+          )
+      );
+    }
 
     Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
     TestHelper.assertExpectedObjects(expectedResults, results, "");
diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouperTest.java
index d04c2b84a4e..5d28f7f6090 100644
--- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouperTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouperTest.java
@@ -25,6 +25,7 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;
 import com.google.common.primitives.Ints;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
@@ -90,17 +91,23 @@ private BufferArrayGrouper newGrouper(
   @Test
   public void testRequiredBufferCapacity()
   {
-    int[] cardinalityArray = new int[] {1, 10, Integer.MAX_VALUE - 1};
-    AggregatorFactory[] aggregatorFactories = new AggregatorFactory[] {
+    int[] cardinalityArray = new int[]{1, 10, Integer.MAX_VALUE - 1};
+    AggregatorFactory[] aggregatorFactories = new AggregatorFactory[]{
         new LongSumAggregatorFactory("sum", "sum")
     };
-
-    long[] requiredSizes = new long[] {17, 90, 16911433721L};
+    long[] requiredSizes;
+    if (NullHandling.sqlCompatible()) {
+      // We need additional size to store nullability information.
+      requiredSizes = new long[]{19, 101, 19058917368L};
+    } else {
+      requiredSizes = new long[]{17, 90, 16911433721L};
+    }
 
     for (int i = 0; i < cardinalityArray.length; i++) {
       Assert.assertEquals(requiredSizes[i], BufferArrayGrouper.requiredBufferCapacity(
           cardinalityArray[i],
-          aggregatorFactories));
+          aggregatorFactories
+      ));
     }
   }
 }
diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java
index 91cd3be6efe..07b61e7233a 100644
--- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java
@@ -27,6 +27,7 @@
 import com.google.common.collect.Ordering;
 import com.google.common.io.Files;
 import com.google.common.primitives.Ints;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.java.util.common.ByteBufferUtils;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -111,7 +112,7 @@ public void testGrowing()
   {
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     final Grouper<Integer> grouper = makeGrouper(columnSelectorFactory, 10000, 2);
-    final int expectedMaxSize = 219;
+    final int expectedMaxSize = NullHandling.replaceWithDefault() ? 219 : 210;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L)));
     for (int i = 0; i < expectedMaxSize; i++) {
@@ -139,7 +140,7 @@ public void testGrowing2()
   {
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     final Grouper<Integer> grouper = makeGrouper(columnSelectorFactory, 2_000_000_000, 2);
-    final int expectedMaxSize = 40988516;
+    final int expectedMaxSize = NullHandling.replaceWithDefault() ? 40988516 : 39141224;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L)));
     for (int i = 0; i < expectedMaxSize; i++) {
@@ -153,7 +154,7 @@ public void testGrowing3()
   {
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     final Grouper<Integer> grouper = makeGrouper(columnSelectorFactory, Integer.MAX_VALUE, 2);
-    final int expectedMaxSize = 44938972;
+    final int expectedMaxSize = NullHandling.replaceWithDefault() ? 44938972 : 42955456;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L)));
     for (int i = 0; i < expectedMaxSize; i++) {
@@ -167,7 +168,7 @@ public void testNoGrowing()
   {
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     final Grouper<Integer> grouper = makeGrouper(columnSelectorFactory, 10000, Integer.MAX_VALUE);
-    final int expectedMaxSize = 267;
+    final int expectedMaxSize = NullHandling.replaceWithDefault() ? 267 : 258;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L)));
     for (int i = 0; i < expectedMaxSize; i++) {
diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java
index ee1a117d290..c45d442e3b6 100644
--- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.java.util.common.IAE;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -53,20 +54,37 @@ public void testLimitAndBufferSwapping()
     for (int i = 0; i < numRows; i++) {
       Assert.assertTrue(String.valueOf(i + keyBase), grouper.aggregate(i + keyBase).isOk());
     }
+    if (NullHandling.replaceWithDefault()) {
+      // bucket size is hash(int) + key(int) + aggs(2 longs) + heap offset(int) = 28 bytes
+      // limit is 100 so heap occupies 101 * 4 bytes = 404 bytes
+      // buffer is 20000 bytes, so table arena size is 20000 - 404 = 19596 bytes
+      // table arena is split in halves when doing push down, so each half is 9798 bytes
+      // each table arena half can hold 9798 / 28 = 349 buckets, with load factor of 0.5 max buckets per half is 174
+      // First buffer swap occurs when we hit 174 buckets
+      // Subsequent buffer swaps occur after every 74 buckets, since we keep 100 buckets due to the limit
+      // With 1000 keys inserted, this results in one swap at the first 174 buckets, then 11 swaps afterwards.
+      // After the last swap, we have 100 keys + 12 new keys inserted.
+      Assert.assertEquals(12, grouper.getGrowthCount());
+      Assert.assertEquals(112, grouper.getSize());
+      Assert.assertEquals(349, grouper.getBuckets());
+      Assert.assertEquals(174, grouper.getMaxSize());
+    } else {
+      // With Nullability enabled
+      // bucket size is hash(int) + key(int) + aggs(2 longs + 1 byte for Long Agg nullability) + heap offset(int) = 29 bytes
+      // limit is 100 so heap occupies 101 * 4 bytes = 404 bytes
+      // buffer is 20000 bytes, so table arena size is 20000 - 404 = 19596 bytes
+      // table arena is split in halves when doing push down, so each half is 9798 bytes
+      // each table arena half can hold 9798 / 29 = 337 buckets, with load factor of 0.5 max buckets per half is 168
+      // First buffer swap occurs when we hit 168 buckets
+      // Subsequent buffer swaps occur after every 68 buckets, since we keep 100 buckets due to the limit
+      // With 1000 keys inserted, this results in one swap at the first 168 buckets, then 12 swaps afterwards.
+      // After the last swap, we have 100 keys + 16 new keys inserted.
+      Assert.assertEquals(13, grouper.getGrowthCount());
+      Assert.assertEquals(116, grouper.getSize());
+      Assert.assertEquals(337, grouper.getBuckets());
+      Assert.assertEquals(168, grouper.getMaxSize());
+    }
 
-    // bucket size is hash(int) + key(int) + aggs(2 longs) + heap offset(int) = 28 bytes
-    // limit is 100 so heap occupies 101 * 4 bytes = 404 bytes
-    // buffer is 20000 bytes, so table arena size is 20000 - 404 = 19596 bytes
-    // table arena is split in halves when doing push down, so each half is 9798 bytes
-    // each table arena half can hold 9798 / 28 = 349 buckets, with load factor of 0.5 max buckets per half is 174
-    // First buffer swap occurs when we hit 174 buckets
-    // Subsequent buffer swaps occur after every 74 buckets, since we keep 100 buckets due to the limit
-    // With 1000 keys inserted, this results in one swap at the first 174 buckets, then 11 swaps afterwards.
-    // After the last swap, we have 100 keys + 12 new keys inserted.
-    Assert.assertEquals(12, grouper.getGrowthCount());
-    Assert.assertEquals(112, grouper.getSize());
-    Assert.assertEquals(349, grouper.getBuckets());
-    Assert.assertEquals(174, grouper.getMaxSize());
     Assert.assertEquals(100, grouper.getLimit());
 
     // Aggregate slightly different row
@@ -77,14 +95,27 @@ public void testLimitAndBufferSwapping()
       Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk());
     }
 
-    // we added another 1000 unique keys
-    // previous size is 112, so next swap occurs after 62 rows
-    // after that, there are 1000 - 62 = 938 rows, 938 / 74 = 12 additional swaps after the first,
-    // with 50 keys being added after the final swap.
-    Assert.assertEquals(25, grouper.getGrowthCount());
-    Assert.assertEquals(150, grouper.getSize());
-    Assert.assertEquals(349, grouper.getBuckets());
-    Assert.assertEquals(174, grouper.getMaxSize());
+    if (NullHandling.replaceWithDefault()) {
+      // we added another 1000 unique keys
+      // previous size is 112, so next swap occurs after 62 rows
+      // after that, there are 1000 - 62 = 938 rows, 938 / 74 = 12 additional swaps after the first,
+      // with 50 keys being added after the final swap.
+      Assert.assertEquals(25, grouper.getGrowthCount());
+      Assert.assertEquals(150, grouper.getSize());
+      Assert.assertEquals(349, grouper.getBuckets());
+      Assert.assertEquals(174, grouper.getMaxSize());
+    } else {
+      // With Nullable Aggregator
+      // we added another 1000 unique keys
+      // previous size is 116, so next swap occurs after 52 rows
+      // after that, there are 1000 - 52 = 948 rows, 948 / 68 = 13 additional swaps after the first,
+      // with 64 keys being added after the final swap.
+      Assert.assertEquals(27, grouper.getGrowthCount());
+      Assert.assertEquals(164, grouper.getSize());
+      Assert.assertEquals(337, grouper.getBuckets());
+      Assert.assertEquals(168, grouper.getMaxSize());
+    }
+
     Assert.assertEquals(100, grouper.getLimit());
 
     final List<Grouper.Entry<Integer>> expected = Lists.newArrayList();
@@ -110,7 +141,7 @@ public void testMinBufferSize()
     final int limit = 100;
     final int keyBase = 100000;
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
-    final LimitedBufferHashGrouper<Integer> grouper = makeGrouper(columnSelectorFactory, 11716, 2, limit);
+    final LimitedBufferHashGrouper<Integer> grouper = makeGrouper(columnSelectorFactory, 12120, 2, limit);
     final int numRows = 1000;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L)));
@@ -119,10 +150,17 @@ public void testMinBufferSize()
     }
 
     // With minimum buffer size, after the first swap, every new key added will result in a swap
-    Assert.assertEquals(899, grouper.getGrowthCount());
-    Assert.assertEquals(101, grouper.getSize());
-    Assert.assertEquals(202, grouper.getBuckets());
-    Assert.assertEquals(101, grouper.getMaxSize());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(224, grouper.getGrowthCount());
+      Assert.assertEquals(104, grouper.getSize());
+      Assert.assertEquals(209, grouper.getBuckets());
+      Assert.assertEquals(104, grouper.getMaxSize());
+    } else {
+      Assert.assertEquals(899, grouper.getGrowthCount());
+      Assert.assertEquals(101, grouper.getSize());
+      Assert.assertEquals(202, grouper.getBuckets());
+      Assert.assertEquals(101, grouper.getMaxSize());
+    }
     Assert.assertEquals(100, grouper.getLimit());
 
     // Aggregate slightly different row
@@ -132,11 +170,17 @@ public void testMinBufferSize()
     for (int i = 0; i < numRows; i++) {
       Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk());
     }
-
-    Assert.assertEquals(1899, grouper.getGrowthCount());
-    Assert.assertEquals(101, grouper.getSize());
-    Assert.assertEquals(202, grouper.getBuckets());
-    Assert.assertEquals(101, grouper.getMaxSize());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(474, grouper.getGrowthCount());
+      Assert.assertEquals(104, grouper.getSize());
+      Assert.assertEquals(209, grouper.getBuckets());
+      Assert.assertEquals(104, grouper.getMaxSize());
+    } else {
+      Assert.assertEquals(1899, grouper.getGrowthCount());
+      Assert.assertEquals(101, grouper.getSize());
+      Assert.assertEquals(202, grouper.getBuckets());
+      Assert.assertEquals(101, grouper.getMaxSize());
+    }
     Assert.assertEquals(100, grouper.getLimit());
 
     final List<Grouper.Entry<Integer>> expected = Lists.newArrayList();
diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouperTest.java
index 07cc2a0f720..2a053051aae 100644
--- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouperTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouperTest.java
@@ -25,6 +25,7 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;
 import com.google.common.primitives.Ints;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.java.util.common.concurrent.Execs;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -100,7 +101,7 @@ public void testStreamingAggregateWithLargeBuffer() throws ExecutionException, I
   @Test(timeout = 60_000L)
   public void testStreamingAggregateWithMinimumBuffer() throws ExecutionException, InterruptedException
   {
-    testStreamingAggregate(60);
+    testStreamingAggregate(83);
   }
 
   private void testStreamingAggregate(int bufferSize) throws ExecutionException, InterruptedException
@@ -128,7 +129,10 @@ private void testStreamingAggregate(int bufferSize) throws ExecutionException, I
       });
 
       final List<Entry<Integer>> unsortedEntries = Lists.newArrayList(grouper.iterator(true));
-      final List<Entry<Integer>> actual = Ordering.from((Comparator<Entry<Integer>>) (o1, o2) -> Ints.compare(o1.getKey(), o2.getKey()))
+      final List<Entry<Integer>> actual = Ordering.from((Comparator<Entry<Integer>>) (o1, o2) -> Ints.compare(
+          o1.getKey(),
+          o2.getKey()
+      ))
                                                   .sortedCopy(unsortedEntries);
 
       if (!actual.equals(expected)) {
@@ -145,7 +149,11 @@ private void testStreamingAggregate(int bufferSize) throws ExecutionException, I
   public void testNotEnoughBuffer()
   {
     expectedException.expect(IllegalStateException.class);
-    expectedException.expectMessage("Buffer[50] should be large enough to store at least three records[20]");
+    if (NullHandling.replaceWithDefault()) {
+      expectedException.expectMessage("Buffer[50] should be large enough to store at least three records[20]");
+    } else {
+      expectedException.expectMessage("Buffer[50] should be large enough to store at least three records[21]");
+    }
 
     newGrouper(GrouperTestUtil.newColumnSelectorFactory(), 50);
   }
@@ -157,7 +165,7 @@ public void testTimeout()
     expectedException.expectCause(CoreMatchers.instanceOf(TimeoutException.class));
 
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
-    final StreamingMergeSortedGrouper<Integer> grouper = newGrouper(columnSelectorFactory, 60);
+    final StreamingMergeSortedGrouper<Integer> grouper = newGrouper(columnSelectorFactory, 100);
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L)));
     grouper.aggregate(6);
diff --git a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnExpectationsTest.java b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnExpectationsTest.java
index 326e59943d1..47b80324ae7 100644
--- a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnExpectationsTest.java
+++ b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnExpectationsTest.java
@@ -20,6 +20,7 @@
 package io.druid.query.lookup;
 
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.query.extraction.MapLookupExtractor;
 import org.junit.Assert;
 import org.junit.Test;
@@ -65,7 +66,11 @@ public void testNullKeyIsMappable()
         false,
         false
     );
-    Assert.assertEquals("bar", lookupExtractionFn.apply(null));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals("bar", lookupExtractionFn.apply(null));
+    } else {
+      Assert.assertEquals("REPLACE", lookupExtractionFn.apply(null));
+    }
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java
index 81c47499518..dd8b7ca581d 100644
--- a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java
@@ -20,13 +20,13 @@
 package io.druid.query.lookup;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.IAE;
 import io.druid.query.extraction.MapLookupExtractor;
@@ -66,7 +66,7 @@
 
   public LookupExtractionFnTest(boolean retainMissing, String replaceMissing, Optional<Boolean> injective)
   {
-    this.replaceMissing = Strings.emptyToNull(replaceMissing);
+    this.replaceMissing = NullHandling.emptyToNullIfNeeded(replaceMissing);
     this.retainMissing = retainMissing;
     this.injective = injective.orElse(null);
   }
@@ -74,7 +74,7 @@ public LookupExtractionFnTest(boolean retainMissing, String replaceMissing, Opti
   @Test
   public void testEqualsAndHash()
   {
-    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
+    if (retainMissing && !NullHandling.isNullOrEquivalent(replaceMissing)) {
       // skip
       return;
     }
@@ -111,7 +111,7 @@ public void testEqualsAndHash()
   @Test
   public void testSimpleSerDe() throws IOException
   {
-    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
+    if (retainMissing && !NullHandling.isNullOrEquivalent(replaceMissing)) {
       // skip
       return;
     }
@@ -146,12 +146,12 @@ public void testSimpleSerDe() throws IOException
   @Test(expected = IllegalArgumentException.class)
   public void testIllegalArgs()
   {
-    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
+    if (retainMissing && !NullHandling.isNullOrEquivalent(replaceMissing)) {
       @SuppressWarnings("unused") // expected exception
       final LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(
           new MapLookupExtractor(ImmutableMap.of("foo", "bar"), false),
           retainMissing,
-          Strings.emptyToNull(replaceMissing),
+          NullHandling.emptyToNullIfNeeded(replaceMissing),
           injective,
           false
       );
@@ -163,7 +163,7 @@ public void testIllegalArgs()
   @Test
   public void testCacheKey()
   {
-    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
+    if (retainMissing && !NullHandling.isNullOrEquivalent(replaceMissing)) {
       // skip
       return;
     }
@@ -178,7 +178,7 @@ public void testCacheKey()
         false
     );
 
-    if (Strings.isNullOrEmpty(replaceMissing) || retainMissing) {
+    if (NullHandling.isNullOrEquivalent(replaceMissing) || retainMissing) {
       Assert.assertFalse(
           Arrays.equals(
               lookupExtractionFn.getCacheKey(),
diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
index 1903957c381..0925ee6ae54 100644
--- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
@@ -752,7 +753,7 @@ public void testSearchWithNullValueInDimension() throws Exception
     QueryRunner runner = factory.createRunner(new QueryableIndexSegment("asdf", TestIndex.persistRealtimeAndLoadMMapped(index)));
     List<SearchHit> expectedHits = Lists.newLinkedList();
     expectedHits.add(new SearchHit("table", "table", 1));
-    expectedHits.add(new SearchHit("table", "", 1));
+    expectedHits.add(new SearchHit("table", NullHandling.defaultStringValue(), 1));
     checkSearchQuery(searchQuery, runner, expectedHits);
   }
 
diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
index 9f748cb75cd..25251a0aa15 100644
--- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
@@ -23,7 +23,9 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import com.google.common.primitives.Doubles;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
 import io.druid.java.util.common.StringUtils;
@@ -143,16 +145,15 @@ public void testEmptyTimeseries()
                                   )
                                   .descending(descending)
                                   .build();
-
+    Map<String, Object> resultMap = Maps.newHashMap();
+    resultMap.put("rows", 0L);
+    resultMap.put("index", NullHandling.defaultDoubleValue());
+    resultMap.put("first", NullHandling.defaultDoubleValue());
     List<Result<TimeseriesResultValue>> expectedResults = ImmutableList.of(
         new Result<>(
             DateTimes.of("2020-04-02"),
             new TimeseriesResultValue(
-                ImmutableMap.of(
-                    "rows", 0L,
-                    "index", 0D,
-                    "first", 0D
-                )
+                resultMap
             )
         )
     );
@@ -204,24 +205,61 @@ public void testFullOnTimeseries()
           QueryRunnerTestHelper.skippedDay.equals(current) ? 0L : 13L,
           value.getLongMetric("rows").longValue()
       );
-      Assert.assertEquals(
-          result.toString(),
-          Doubles.tryParse(expectedIndex[count]).doubleValue(),
-          value.getDoubleMetric("index").doubleValue(),
-          value.getDoubleMetric("index").doubleValue() * 1e-6
-      );
-      Assert.assertEquals(
-          result.toString(),
-          new Double(expectedIndex[count]) +
-          (QueryRunnerTestHelper.skippedDay.equals(current) ? 0L : 13L) + 1L,
-          value.getDoubleMetric("addRowsIndexConstant"),
-          value.getDoubleMetric("addRowsIndexConstant") * 1e-6
-      );
-      Assert.assertEquals(
-          value.getDoubleMetric("uniques"),
-          QueryRunnerTestHelper.skippedDay.equals(current) ? 0.0d : 9.0d,
-          0.02
-      );
+
+      if (!QueryRunnerTestHelper.skippedDay.equals(current)) {
+        Assert.assertEquals(
+            result.toString(),
+            Doubles.tryParse(expectedIndex[count]).doubleValue(),
+            value.getDoubleMetric("index").doubleValue(),
+            value.getDoubleMetric("index").doubleValue() * 1e-6
+        );
+        Assert.assertEquals(
+            result.toString(),
+            new Double(expectedIndex[count]) +
+            13L + 1L,
+            value.getDoubleMetric("addRowsIndexConstant"),
+            value.getDoubleMetric("addRowsIndexConstant") * 1e-6
+        );
+        Assert.assertEquals(
+            value.getDoubleMetric("uniques"),
+            9.0d,
+            0.02
+        );
+      } else {
+        if (NullHandling.replaceWithDefault()) {
+          Assert.assertEquals(
+              result.toString(),
+              0.0D,
+              value.getDoubleMetric("index").doubleValue(),
+              value.getDoubleMetric("index").doubleValue() * 1e-6
+          );
+          Assert.assertEquals(
+              result.toString(),
+              new Double(expectedIndex[count]) + 1L,
+              value.getDoubleMetric("addRowsIndexConstant"),
+              value.getDoubleMetric("addRowsIndexConstant") * 1e-6
+          );
+          Assert.assertEquals(
+              0.0D,
+              value.getDoubleMetric("uniques"),
+              0.02
+          );
+        } else {
+          Assert.assertNull(
+              result.toString(),
+              value.getDoubleMetric("index")
+          );
+          Assert.assertNull(
+              result.toString(),
+              value.getDoubleMetric("addRowsIndexConstant")
+          );
+          Assert.assertEquals(
+              value.getDoubleMetric("uniques"),
+              0.0d,
+              0.02
+          );
+        }
+      }
 
       lastResult = result;
       ++count;
@@ -507,13 +545,13 @@ public void testTimeseriesIntervalOutOfRanges()
         new Result<>(
             QueryRunnerTestHelper.emptyInterval.getIntervals().get(0).getStart(),
             new TimeseriesResultValue(
-                ImmutableMap.of(
+                TestHelper.createExpectedMap(
                     "rows",
                     0L,
                     "index",
-                    0L,
+                    NullHandling.defaultLongValue(),
                     QueryRunnerTestHelper.addRowsIndexConstantMetric,
-                    1.0
+                    NullHandling.sqlCompatible() ? null : 1.0
                 )
             )
         )
@@ -770,14 +808,15 @@ public void testTimeseriesQueryZeroFilling()
     final Iterable<Interval> iterable = Granularities.HOUR.getIterable(
         new Interval(DateTimes.of("2011-04-14T01"), DateTimes.of("2011-04-15"))
     );
+    Map noRowsResult = Maps.newHashMap();
+    noRowsResult.put("rows", 0L);
+    noRowsResult.put("idx", NullHandling.defaultLongValue());
     for (Interval interval : iterable) {
       lotsOfZeroes.add(
-              new Result<>(
-                      interval.getStart(),
-                      new TimeseriesResultValue(
-                              ImmutableMap.of("rows", 0L, "idx", 0L)
-                      )
-              )
+          new Result<>(
+              interval.getStart(),
+              new TimeseriesResultValue(noRowsResult)
+          )
       );
     }
 
@@ -1482,27 +1521,23 @@ public void testTimeseriesWithFilterOnNonExistentDimension()
                                   .descending(descending)
                                   .build();
 
+    Map<String, Object> resultMap = Maps.newHashMap();
+    resultMap.put("rows", 0L);
+    resultMap.put("index", NullHandling.defaultDoubleValue());
+    resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null);
+    resultMap.put("uniques", 0.0);
+
     List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
         new Result<>(
             DateTimes.of("2011-04-01"),
             new TimeseriesResultValue(
-                ImmutableMap.of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         ),
         new Result<>(
             DateTimes.of("2011-04-02"),
             new TimeseriesResultValue(
-                ImmutableMap.of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         )
     );
@@ -1627,28 +1662,23 @@ public void testTimeseriesWithNonExistentFilter()
                                   .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
                                   .descending(descending)
                                   .build();
+    Map<String, Object> resultMap = Maps.newHashMap();
+    resultMap.put("rows", 0L);
+    resultMap.put("index", NullHandling.defaultDoubleValue());
+    resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null);
+    resultMap.put("uniques", 0.0);
 
     List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
         new Result<>(
             DateTimes.of("2011-04-01"),
             new TimeseriesResultValue(
-                ImmutableMap.of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         ),
         new Result<>(
             DateTimes.of("2011-04-02"),
             new TimeseriesResultValue(
-                ImmutableMap.of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         )
     );
@@ -1673,28 +1703,23 @@ public void testTimeseriesWithNonExistentFilterAndMultiDim()
                                   .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
                                   .descending(descending)
                                   .build();
+    Map<String, Object> resultMap = Maps.newHashMap();
+    resultMap.put("rows", 0L);
+    resultMap.put("index", NullHandling.defaultDoubleValue());
+    resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null);
+    resultMap.put("uniques", 0.0);
 
     List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
         new Result<>(
             DateTimes.of("2011-04-01"),
             new TimeseriesResultValue(
-                ImmutableMap.of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         ),
         new Result<>(
             DateTimes.of("2011-04-02"),
             new TimeseriesResultValue(
-                ImmutableMap.of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         )
     );
diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
index 547edc48c71..2f7aee6ac50 100644
--- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
@@ -29,6 +29,7 @@
 import com.google.common.primitives.Doubles;
 import com.google.common.primitives.Longs;
 import io.druid.collections.StupidPool;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.IAE;
 import io.druid.java.util.common.ISE;
@@ -4206,10 +4207,15 @@ public void testTopNWithExtractionFilter()
   public void testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue()
   {
     Map<String, String> extractionMap = new HashMap<>();
-    extractionMap.put("", "NULL");
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
+    LookupExtractionFn lookupExtractionFn;
+    if (NullHandling.replaceWithDefault()) {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
+      extractionMap.put("", "NULL");
+    } else {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "NULL", true, false);
+    }
     DimFilter extractionFilter = new ExtractionDimFilter("null_column", "NULL", lookupExtractionFn, null);
     TopNQueryBuilder topNQueryBuilder = new TopNQueryBuilder()
         .dataSource(QueryRunnerTestHelper.dataSource)
@@ -4279,10 +4285,16 @@ public void testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue
   public void testTopNWithExtractionFilterNoExistingValue()
   {
     Map<String, String> extractionMap = new HashMap<>();
-    extractionMap.put("", "NULL");
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+    LookupExtractionFn lookupExtractionFn;
+    if (NullHandling.replaceWithDefault()) {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+      extractionMap.put("", "NULL");
+    } else {
+      extractionMap.put("", "NOT_USED");
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "NULL", true, true);
+    }
     DimFilter extractionFilter = new ExtractionDimFilter("null_column", "NULL", lookupExtractionFn, null);
     TopNQueryBuilder topNQueryBuilder = new TopNQueryBuilder()
         .dataSource(QueryRunnerTestHelper.dataSource)
diff --git a/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java b/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java
index 6ca13b25aaa..9ba0eff774b 100644
--- a/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java
+++ b/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java
@@ -19,6 +19,7 @@
 
 package io.druid.segment;
 
+import io.druid.common.config.NullHandling;
 import io.druid.query.extraction.StringFormatExtractionFn;
 import io.druid.query.extraction.SubstringDimExtractionFn;
 import io.druid.segment.data.IndexedInts;
@@ -68,7 +69,7 @@ public void testLookupName()
   public void testLookupId()
   {
     Assert.assertEquals(0, NULL_SELECTOR.idLookup().lookupId(null));
-    Assert.assertEquals(0, NULL_SELECTOR.idLookup().lookupId(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 0 : -1, NULL_SELECTOR.idLookup().lookupId(""));
     Assert.assertEquals(-1, NULL_SELECTOR.idLookup().lookupId("billy"));
     Assert.assertEquals(-1, NULL_SELECTOR.idLookup().lookupId("bob"));
 
diff --git a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java
index 94ef6204f2e..046cd15621a 100644
--- a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java
+++ b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java
@@ -27,6 +27,7 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import io.druid.collections.bitmap.RoaringBitmapFactory;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionSchema;
@@ -221,11 +222,11 @@ public void testPersistWithDifferentDims() throws Exception
     Assert.assertEquals(ImmutableList.of("1", "2"), rowList.get(0).dimensionValues());
     Assert.assertEquals(Arrays.asList("3", null), rowList.get(1).dimensionValues());
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim1", "1"));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dim1", "3"));
 
-    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dim2", ""));
+    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dim2", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim2", "2"));
   }
 
@@ -804,17 +805,17 @@ public void testNonLexicographicDimOrderMerge() throws Exception
     Assert.assertEquals(Arrays.asList("50000", "200", "3000"), rowList.get(2).dimensionValues());
     Assert.assertEquals(Collections.singletonList(3L), rowList.get(2).metricValues());
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d3", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d3", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d3", "30000"));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d3", "40000"));
     checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d3", "50000"));
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d1", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d1", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d1", "100"));
     checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d1", "200"));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d1", "300"));
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d2", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d2", null));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d2", "2000"));
     checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d2", "3000"));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d2", "4000"));
@@ -904,13 +905,13 @@ public void testMergeWithDimensionsList() throws Exception
     Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dimC").hasBitmapIndexes());
 
     if (useBitmapIndexes) {
-      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimA", ""));
+      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimA", null));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("dimA", "1"));
       checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("dimA", "2"));
 
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dimB", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dimB", null));
 
-      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("dimC", ""));
+      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("dimC", null));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dimC", "1"));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dimC", "2"));
     }
@@ -977,7 +978,7 @@ public void testDisjointDimMerge() throws Exception
 
       // dimA always has bitmap indexes, since it has them in indexA (it comes in through discovery).
       Assert.assertTrue(adapter.getCapabilities("dimA").hasBitmapIndexes());
-      checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("dimA", ""));
+      checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("dimA", null));
       checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("dimA", "1"));
       checkBitmapIndex(Collections.singletonList(4), adapter.getBitmapIndex("dimA", "2"));
 
@@ -989,7 +990,7 @@ public void testDisjointDimMerge() throws Exception
       }
       //noinspection ObjectEquality
       if (toPersistB != toPersistB2 || useBitmapIndexes) {
-        checkBitmapIndex(Arrays.asList(3, 4), adapter.getBitmapIndex("dimB", ""));
+        checkBitmapIndex(Arrays.asList(3, 4), adapter.getBitmapIndex("dimB", null));
         checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dimB", "1"));
         checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dimB", "2"));
         checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("dimB", "3"));
@@ -1086,52 +1087,82 @@ public void testJointDimMerge() throws Exception
       final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
       final List<DebugRow> rowList = RowIteratorHelper.toList(adapter.getRows());
 
-      Assert.assertEquals(
-          ImmutableList.of("d2", "d3", "d5", "d6", "d7", "d8", "d9"),
-          ImmutableList.copyOf(adapter.getDimensionNames())
-      );
+      if (NullHandling.replaceWithDefault()) {
+        Assert.assertEquals(
+            ImmutableList.of("d2", "d3", "d5", "d6", "d7", "d8", "d9"),
+            ImmutableList.copyOf(adapter.getDimensionNames())
+        );
+      } else {
+        Assert.assertEquals(
+            ImmutableList.of("d1", "d2", "d3", "d5", "d6", "d7", "d8", "d9"),
+            ImmutableList.copyOf(adapter.getDimensionNames())
+        );
+      }
       Assert.assertEquals(4, rowList.size());
-      Assert.assertEquals(
-          Arrays.asList(null, "310", null, null, null, null, "910"),
-          rowList.get(0).dimensionValues()
-      );
-      Assert.assertEquals(
-          Arrays.asList("210", "311", null, null, "710", "810", "911"),
-          rowList.get(1).dimensionValues()
-      );
-      Assert.assertEquals(
-          Arrays.asList(null, null, "520", "620", "720", "820", "920"),
-          rowList.get(2).dimensionValues()
-      );
-      Assert.assertEquals(
-          Arrays.asList(null, null, null, "621", null, "821", "921"),
-          rowList.get(3).dimensionValues()
-      );
+      if (NullHandling.replaceWithDefault()) {
+        Assert.assertEquals(
+            Arrays.asList(null, "310", null, null, null, null, "910"),
+            rowList.get(0).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList("210", "311", null, null, "710", "810", "911"),
+            rowList.get(1).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, null, "520", "620", "720", "820", "920"),
+            rowList.get(2).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, null, null, "621", null, "821", "921"),
+            rowList.get(3).dimensionValues()
+        );
+
+        checkBitmapIndex(Arrays.asList(0, 2, 3), adapter.getBitmapIndex("d2", null));
+        checkBitmapIndex(Arrays.asList(0, 1, 3), adapter.getBitmapIndex("d5", null));
+        checkBitmapIndex(Arrays.asList(0, 3), adapter.getBitmapIndex("d7", null));
+      } else {
+        Assert.assertEquals(
+            Arrays.asList("", "", "310", null, null, "", null, "910"),
+            rowList.get(0).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, "210", "311", null, null, "710", "810", "911"),
+            rowList.get(1).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, null, null, "520", "620", "720", "820", "920"),
+            rowList.get(2).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, null, null, "", "621", "", "821", "921"),
+            rowList.get(3).dimensionValues()
+        );
+        checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("d2", null));
+        checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("d5", null));
+        checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d7", null));
+      }
 
-      checkBitmapIndex(Arrays.asList(0, 2, 3), adapter.getBitmapIndex("d2", ""));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d2", "210"));
 
-      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("d3", ""));
+      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("d3", null));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d3", "310"));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d3", "311"));
 
-      checkBitmapIndex(Arrays.asList(0, 1, 3), adapter.getBitmapIndex("d5", ""));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d5", "520"));
 
-      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("d6", ""));
+      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("d6", null));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d6", "620"));
       checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d6", "621"));
 
-      checkBitmapIndex(Arrays.asList(0, 3), adapter.getBitmapIndex("d7", ""));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d7", "710"));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d7", "720"));
 
-      checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d8", ""));
+      checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d8", null));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d8", "810"));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d8", "820"));
       checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d8", "821"));
 
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", null));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d9", "910"));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d9", "911"));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d9", "920"));
@@ -1229,26 +1260,44 @@ public void testNoRollupMergeWithDuplicateRow() throws Exception
     final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
     final List<DebugRow> rowList = RowIteratorHelper.toList(adapter.getRows());
 
-    Assert.assertEquals(
-        ImmutableList.of("d3", "d6", "d8", "d9"),
-        ImmutableList.copyOf(adapter.getDimensionNames())
-    );
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(
+          ImmutableList.of("d3", "d6", "d8", "d9"),
+          ImmutableList.copyOf(adapter.getDimensionNames())
+      );
+    } else {
+      Assert.assertEquals(
+          ImmutableList.of("d1", "d2", "d3", "d5", "d6", "d7", "d8", "d9"),
+          ImmutableList.copyOf(adapter.getDimensionNames())
+      );
+    }
+
     Assert.assertEquals(4, rowList.size());
-    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(0).dimensionValues());
-    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(1).dimensionValues());
-    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(2).dimensionValues());
-    Assert.assertEquals(Arrays.asList(null, "621", "821", "921"), rowList.get(3).dimensionValues());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(0).dimensionValues());
+      Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(1).dimensionValues());
+      Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(2).dimensionValues());
+      Assert.assertEquals(Arrays.asList(null, "621", "821", "921"), rowList.get(3).dimensionValues());
+    } else {
+      Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(0).dimensionValues());
+      Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(1).dimensionValues());
+      Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(2).dimensionValues());
+      Assert.assertEquals(
+          Arrays.asList(null, null, null, "", "621", "", "821", "921"),
+          rowList.get(3).dimensionValues()
+      );
+    }
 
-    checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d3", ""));
+    checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d3", null));
     checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d3", "310"));
 
-    checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d6", ""));
+    checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d6", null));
     checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d6", "621"));
 
-    checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d8", ""));
+    checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d8", null));
     checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d8", "821"));
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", null));
     checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d9", "910"));
     checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d9", "921"));
   }
@@ -1281,7 +1330,7 @@ public void testMergeWithSupersetOrdering() throws Exception
         new MapBasedInputRow(
             1,
             Arrays.asList("dimB", "dimA"),
-            ImmutableMap.of("dimB", "1", "dimA", "")
+            ImmutableMap.of("dimB", "1")
         )
     );
 
@@ -1289,7 +1338,7 @@ public void testMergeWithSupersetOrdering() throws Exception
         new MapBasedInputRow(
             1,
             Arrays.asList("dimB", "dimA"),
-            ImmutableMap.of("dimB", "", "dimA", "1")
+            ImmutableMap.of("dimA", "1")
         )
     );
 
@@ -1375,11 +1424,11 @@ public void testMergeWithSupersetOrdering() throws Exception
     Assert.assertEquals(Arrays.asList("3", null), rowList.get(4).dimensionValues());
     Assert.assertEquals(Collections.singletonList(2L), rowList.get(4).metricValues());
 
-    checkBitmapIndex(Arrays.asList(2, 3, 4), adapter.getBitmapIndex("dimA", ""));
+    checkBitmapIndex(Arrays.asList(2, 3, 4), adapter.getBitmapIndex("dimA", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dimA", "1"));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dimA", "2"));
 
-    checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimB", ""));
+    checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimB", null));
     checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("dimB", "1"));
     checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("dimB", "2"));
     checkBitmapIndex(Collections.singletonList(4), adapter.getBitmapIndex("dimB", "3"));
@@ -1417,16 +1466,16 @@ public void testMergeWithSupersetOrdering() throws Exception
     Assert.assertEquals(Arrays.asList("2", null, null), rowList2.get(11).dimensionValues());
     Assert.assertEquals(Collections.singletonList(2L), rowList2.get(11).metricValues());
 
-    checkBitmapIndex(Arrays.asList(0, 1, 2, 3, 4, 5, 8, 9, 10), adapter2.getBitmapIndex("dimA", ""));
+    checkBitmapIndex(Arrays.asList(0, 1, 2, 3, 4, 5, 8, 9, 10), adapter2.getBitmapIndex("dimA", null));
     checkBitmapIndex(Collections.singletonList(6), adapter2.getBitmapIndex("dimA", "1"));
     checkBitmapIndex(Arrays.asList(7, 11), adapter2.getBitmapIndex("dimA", "2"));
 
-    checkBitmapIndex(Arrays.asList(0, 1, 2, 6, 7, 11), adapter2.getBitmapIndex("dimB", ""));
+    checkBitmapIndex(Arrays.asList(0, 1, 2, 6, 7, 11), adapter2.getBitmapIndex("dimB", null));
     checkBitmapIndex(Arrays.asList(3, 8), adapter2.getBitmapIndex("dimB", "1"));
     checkBitmapIndex(Arrays.asList(4, 9), adapter2.getBitmapIndex("dimB", "2"));
     checkBitmapIndex(Arrays.asList(5, 10), adapter2.getBitmapIndex("dimB", "3"));
 
-    checkBitmapIndex(Arrays.asList(3, 4, 5, 6, 7, 8, 9, 10, 11), adapter2.getBitmapIndex("dimC", ""));
+    checkBitmapIndex(Arrays.asList(3, 4, 5, 6, 7, 8, 9, 10, 11), adapter2.getBitmapIndex("dimC", null));
     checkBitmapIndex(Collections.singletonList(0), adapter2.getBitmapIndex("dimC", "1"));
     checkBitmapIndex(Collections.singletonList(1), adapter2.getBitmapIndex("dimC", "2"));
     checkBitmapIndex(Collections.singletonList(2), adapter2.getBitmapIndex("dimC", "3"));
@@ -1747,7 +1796,14 @@ public void testMergeNumericDims() throws Exception
     Assert.assertEquals(ImmutableList.of("dimA", "dimB", "dimC"), ImmutableList.copyOf(adapter.getDimensionNames()));
     Assert.assertEquals(4, rowList.size());
 
-    Assert.assertEquals(Arrays.asList(0L, 0.0f, "Nully Row"), rowList.get(0).dimensionValues());
+    Assert.assertEquals(
+        Arrays.asList(
+            NullHandling.defaultLongValue(),
+            NullHandling.defaultFloatValue(),
+            "Nully Row"
+        ),
+        rowList.get(0).dimensionValues()
+    );
     Assert.assertEquals(Collections.singletonList(2L), rowList.get(0).metricValues());
 
     Assert.assertEquals(Arrays.asList(72L, 60000.789f, "World"), rowList.get(1).dimensionValues());
@@ -1829,13 +1885,13 @@ public void testPersistNullColumnSkipping() throws Exception
     index1.add(new MapBasedInputRow(
         1L,
         Arrays.asList("d1", "d2"),
-        ImmutableMap.of("d1", "a", "d2", "", "A", 1)
+        ImmutableMap.of("d1", "a", "A", 1)
     ));
 
     index1.add(new MapBasedInputRow(
         1L,
         Arrays.asList("d1", "d2"),
-        ImmutableMap.of("d1", "b", "d2", "", "A", 1)
+        ImmutableMap.of("d1", "b", "A", 1)
     ));
 
     final File tempDir = temporaryFolder.newFolder();
@@ -2033,7 +2089,7 @@ public void testMultiValueHandling() throws Exception
     Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dim2").hasBitmapIndexes());
 
     if (useBitmapIndexes) {
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", null));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "a"));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "b"));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "x"));
@@ -2064,7 +2120,7 @@ public void testMultiValueHandling() throws Exception
     Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dim2").hasBitmapIndexes());
 
     if (useBitmapIndexes) {
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", null));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim1", "a"));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim1", "b"));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim1", "x"));
@@ -2099,7 +2155,7 @@ public void testMultiValueHandling() throws Exception
     Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dim2").hasBitmapIndexes());
 
     if (useBitmapIndexes) {
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", null));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "a"));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "b"));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "x"));
diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
index 8f50947560b..95b1eaf6429 100644
--- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
+++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
 import io.druid.java.util.common.Pair;
@@ -401,7 +402,7 @@ public void testNonIntersectingSchemas()
                     .put("addRowsIndexConstant", 103.0D)
                     .put("uniques", UNIQUES_1)
                     .put("maxIndex", 100.0D)
-                    .put("minIndex", 0.0D)
+                    .put("minIndex", NullHandling.replaceWithDefault() ? 0.0D : 100.0D)
                     .build()
             )
         )
@@ -748,7 +749,7 @@ public void testValueAndEmptySchemas()
                     .put("addRowsIndexConstant", 103.0D)
                     .put("uniques", UNIQUES_1)
                     .put("maxIndex", 100.0D)
-                    .put("minIndex", 0.0D)
+                    .put("minIndex", NullHandling.replaceWithDefault() ? 0.0D : 100.0D)
                     .build()
             )
         )
@@ -865,15 +866,14 @@ public void testEmptySchemas()
         new Result<>(
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>builder()
-                    .put("rows", 1L)
-                    .put("index", 0.0D)
-                    .put("addRowsIndexConstant", 2.0D)
-                    .put("uniques", 0.0D)
-                    .put("maxIndex", 0.0D)
-                    .put("minIndex", 0.0D)
-                    .build()
-            )
+                TestHelper.createExpectedMap(
+                    "rows", 1L,
+                    "index", NullHandling.replaceWithDefault() ? 0.0D : null,
+                    "addRowsIndexConstant", NullHandling.replaceWithDefault() ? 2.0D : null,
+                    "uniques", 0.0D,
+                    "maxIndex", NullHandling.replaceWithDefault() ? 0.0D : null,
+                    "minIndex", NullHandling.replaceWithDefault() ? 0.0D : null
+                ))
         )
     );
 
@@ -881,14 +881,14 @@ public void testEmptySchemas()
         new Result<>(
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>builder()
-                    .put("rows", 0L)
-                    .put("index", 0.0D)
-                    .put("addRowsIndexConstant", 1.0D)
-                    .put("uniques", 0.0D)
-                    .put("maxIndex", Double.NEGATIVE_INFINITY)
-                    .put("minIndex", Double.POSITIVE_INFINITY)
-                    .build()
+                TestHelper.createExpectedMap(
+                    "rows", 0L,
+                    "index", NullHandling.replaceWithDefault() ? 0.0D : null,
+                    "addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0D : null,
+                    "uniques", 0.0D,
+                    "maxIndex", NullHandling.replaceWithDefault() ? Double.NEGATIVE_INFINITY : null,
+                    "minIndex", NullHandling.replaceWithDefault() ? Double.POSITIVE_INFINITY : null
+                )
             )
         )
     );
@@ -1182,12 +1182,12 @@ public void testDifferentMetrics()
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
                 ImmutableMap.<String, Object>builder()
-                    .put("rows", 10L)
+                    .put("rows", NullHandling.sqlCompatible() ? 11L : 10L)
                     .put("index", 900.0D)
-                    .put("addRowsIndexConstant", 911.0D)
+                    .put("addRowsIndexConstant", NullHandling.sqlCompatible() ? 912.0D : 911.0D)
                     .put("uniques", UNIQUES_1)
                     .put("maxIndex", 100.0D)
-                    .put("minIndex", 0.0D)
+                    .put("minIndex", NullHandling.replaceWithDefault() ? 0.0D : 100.0D)
                     .build()
             )
         )
diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
index 00fa23f2f23..09f92a42de5 100644
--- a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
+++ b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
@@ -22,6 +22,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
 import io.druid.java.util.common.granularity.Granularities;
@@ -116,7 +117,8 @@
   public SchemalessTestSimpleTest(Segment segment, boolean coalesceAbsentAndEmptyDims)
   {
     this.segment = segment;
-    this.coalesceAbsentAndEmptyDims = coalesceAbsentAndEmptyDims;
+    // Absent and empty dims are equivalent only when replaceWithDefault is true
+    this.coalesceAbsentAndEmptyDims = coalesceAbsentAndEmptyDims && NullHandling.replaceWithDefault();
   }
 
   @Test
@@ -141,7 +143,7 @@ public void testFullOnTimeseries()
                                   .build();
 
     List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
-        new Result<TimeseriesResultValue>(
+        new Result(
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
                 ImmutableMap.<String, Object>builder()
@@ -150,13 +152,13 @@ public void testFullOnTimeseries()
                     .put("addRowsIndexConstant", coalesceAbsentAndEmptyDims ? 911.0 : 912.0)
                     .put("uniques", 2.000977198748901D)
                     .put("maxIndex", 100.0)
-                    .put("minIndex", 0.0)
+                    .put("minIndex", NullHandling.replaceWithDefault() ? 0.0 : 100.0)
                     .build()
             )
         )
     );
     QueryRunner runner = TestQueryRunners.makeTimeSeriesQueryRunner(segment);
-    HashMap<String, Object> context = new HashMap<String, Object>();
+    HashMap<String, Object> context = new HashMap();
     TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), context));
   }
 
diff --git a/processing/src/test/java/io/druid/segment/TestHelper.java b/processing/src/test/java/io/druid/segment/TestHelper.java
index 761f99b36f5..b44c63523f2 100644
--- a/processing/src/test/java/io/druid/segment/TestHelper.java
+++ b/processing/src/test/java/io/druid/segment/TestHelper.java
@@ -21,7 +21,9 @@
 
 import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import io.druid.data.input.MapBasedRow;
 import io.druid.data.input.Row;
 import io.druid.jackson.DefaultObjectMapper;
@@ -315,12 +317,16 @@ private static void assertRow(String msg, Row expected, Row actual)
       final Object actualValue = actualMap.get(key);
 
       if (expectedValue instanceof Float || expectedValue instanceof Double) {
-        Assert.assertEquals(
-            StringUtils.format("%s: key[%s]", msg, key),
-            ((Number) expectedValue).doubleValue(),
-            ((Number) actualValue).doubleValue(),
-            Math.abs(((Number) expectedValue).doubleValue() * 1e-6)
-        );
+        if (expectedValue == null) {
+          Assert.assertNull(actualValue);
+        } else {
+          Assert.assertEquals(
+              StringUtils.format("%s: key[%s]", msg, key),
+              ((Number) expectedValue).doubleValue(),
+              ((Number) actualValue).doubleValue(),
+              Math.abs(((Number) expectedValue).doubleValue() * 1e-6)
+          );
+        }
       } else {
         Assert.assertEquals(
             StringUtils.format("%s: key[%s]", msg, key),
@@ -330,4 +336,16 @@ private static void assertRow(String msg, Row expected, Row actual)
       }
     }
   }
+
+
+  public static Map<String, Object> createExpectedMap(Object... vals)
+  {
+    Preconditions.checkArgument(vals.length % 2 == 0);
+
+    Map<String, Object> theVals = Maps.newHashMap();
+    for (int i = 0; i < vals.length; i += 2) {
+      theVals.put(vals[i].toString(), vals[i + 1]);
+    }
+    return theVals;
+  }
 }
diff --git a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java
index 129dfca1694..541886a373f 100644
--- a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -88,6 +89,21 @@ public static void tearDown() throws Exception
 
   @Test
   public void testLexicographicMatchEverything()
+  {
+    final List<BoundDimFilter> filters = ImmutableList.of(
+        new BoundDimFilter("dim0", null, "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+        new BoundDimFilter("dim1", null, "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+        new BoundDimFilter("dim2", null, "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+        new BoundDimFilter("dim3", null, "z", false, false, false, null, StringComparators.LEXICOGRAPHIC)
+    );
+
+    for (BoundDimFilter filter : filters) {
+      assertFilterMatches(filter, ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+    }
+  }
+
+  @Test
+  public void testLexicographicMatchWithEmptyString()
   {
     final List<BoundDimFilter> filters = ImmutableList.of(
         new BoundDimFilter("dim0", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
@@ -95,9 +111,15 @@ public void testLexicographicMatchEverything()
         new BoundDimFilter("dim2", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
         new BoundDimFilter("dim3", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC)
     );
-
-    for (BoundDimFilter filter : filters) {
-      assertFilterMatches(filter, ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+    if (NullHandling.replaceWithDefault()) {
+      for (BoundDimFilter filter : filters) {
+        assertFilterMatches(filter, ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+      }
+    } else {
+      assertFilterMatches(filters.get(0), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+      assertFilterMatches(filters.get(1), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+      assertFilterMatches(filters.get(2), ImmutableList.of("0", "2", "3", "4", "6", "7"));
+      assertFilterMatches(filters.get(3), ImmutableList.of());
     }
   }
 
@@ -112,19 +134,49 @@ public void testLexicographicMatchNull()
         new BoundDimFilter("dim1", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
         ImmutableList.of("0")
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("1", "2", "5")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("1", "2", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("2")
+      );
+    }
   }
 
   @Test
   public void testLexicographicMatchMissingColumn()
   {
-    assertFilterMatches(
-        new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", null, false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.<String>of()
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of()
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", null, false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of()
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.<String>of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    }
     assertFilterMatches(
         new BoundDimFilter("dim3", "", "", true, false, false, null, StringComparators.LEXICOGRAPHIC),
         ImmutableList.of()
@@ -133,18 +185,22 @@ public void testLexicographicMatchMissingColumn()
         new BoundDimFilter("dim3", "", "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
         ImmutableList.of()
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim3", "", null, false, true, false, null, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+
     assertFilterMatches(
         new BoundDimFilter("dim3", null, "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
         ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of()
-    );
+    if (NullHandling.sqlCompatible()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of()
+      );
+    }
   }
 
 
@@ -229,14 +285,25 @@ public void testAlphaNumericMatchNull()
         new BoundDimFilter("dim1", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
         ImmutableList.of("0")
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
-        ImmutableList.of("1", "2", "5")
-    );
-    assertFilterMatches(
-        new BoundDimFilter("dim3", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
+          ImmutableList.of("1", "2", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
+          ImmutableList.of("2")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
+          ImmutableList.of()
+      );
+    }
   }
 
   @Test
@@ -327,14 +394,26 @@ public void testNumericMatchNull()
         new BoundDimFilter("dim1", "", "", false, false, false, null, StringComparators.NUMERIC),
         ImmutableList.of("0")
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.NUMERIC),
-        ImmutableList.of("1", "2", "5")
-    );
-    assertFilterMatches(
-        new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.NUMERIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.NUMERIC),
+          ImmutableList.of("1", "2", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.NUMERIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.NUMERIC),
+          ImmutableList.of("2")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.NUMERIC),
+          ImmutableList.of()
+      );
+    }
+
   }
 
   @Test
@@ -432,10 +511,17 @@ public void testMatchWithExtractionFn()
     String nullJsFn = "function(str) { return null; }";
     ExtractionFn makeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());
 
-    assertFilterMatches(
-        new BoundDimFilter("dim0", "", "", false, false, false, makeNullFn, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim0", "", "", false, false, false, makeNullFn, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim0", "", "", false, false, false, makeNullFn, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of()
+      );
+    }
 
     assertFilterMatches(
         new BoundDimFilter("dim1", "super-ab", "super-abd", true, true, false, superFn, StringComparators.LEXICOGRAPHIC),
@@ -452,10 +538,69 @@ public void testMatchWithExtractionFn()
         ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
     );
 
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "super-null", "super-null", false, false, false, superFn, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("1", "2", "5")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter(
+              "dim2",
+              "super-null",
+              "super-null",
+              false,
+              false,
+              false,
+              superFn,
+              StringComparators.LEXICOGRAPHIC
+          ),
+          ImmutableList.of("1", "2", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter(
+              "dim2",
+              "super-null",
+              "super-null",
+              false,
+              false,
+              false,
+              superFn,
+              StringComparators.NUMERIC
+          ),
+          ImmutableList.of("1", "2", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter(
+              "dim2",
+              "super-null",
+              "super-null",
+              false,
+              false,
+              false,
+              superFn,
+              StringComparators.LEXICOGRAPHIC
+          ),
+          ImmutableList.of("1", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "super-", "super-", false, false, false, superFn, StringComparators.NUMERIC),
+          ImmutableList.of("2")
+      );
+      assertFilterMatches(
+          new BoundDimFilter(
+              "dim2",
+              "super-null",
+              "super-null",
+              false,
+              false,
+              false,
+              superFn,
+              StringComparators.LEXICOGRAPHIC
+          ),
+          ImmutableList.of("1", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "super-", "super-", false, false, false, superFn, StringComparators.NUMERIC),
+          ImmutableList.of("2")
+      );
+    }
 
     assertFilterMatches(
         new BoundDimFilter("dim3", "super-null", "super-null", false, false, false, superFn, StringComparators.LEXICOGRAPHIC),
@@ -467,11 +612,6 @@ public void testMatchWithExtractionFn()
         ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
     );
 
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "super-null", "super-null", false, false, false, superFn, StringComparators.NUMERIC),
-        ImmutableList.of("1", "2", "5")
-    );
-
     assertFilterMatches(
         new BoundDimFilter("dim4", "super-null", "super-null", false, false, false, superFn, StringComparators.NUMERIC),
         ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
diff --git a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java
index ab5c9641664..4fbfcefec74 100644
--- a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -118,18 +119,41 @@ public void testColumnsWithoutNulls()
   @Test
   public void testMissingColumnNotSpecifiedInDimensionList()
   {
-    assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of(
-        DefaultDimensionSpec.of("dim6"),
-        DefaultDimensionSpec.of("dim7")
-    )), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of(
-        DefaultDimensionSpec.of("dim1"),
-        DefaultDimensionSpec.of("dim6")
-    )), ImmutableList.of("0"));
-    assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of(
-        DefaultDimensionSpec.of("dim2"),
-        DefaultDimensionSpec.of("dim6")
-    )), ImmutableList.of("1", "2", "6", "7", "8"));
+    assertFilterMatches(
+        new ColumnComparisonDimFilter(
+            ImmutableList.of(DefaultDimensionSpec.of("dim6"), DefaultDimensionSpec.of("dim7"))
+        ),
+        ImmutableList.<String>of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
+    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new ColumnComparisonDimFilter(
+              ImmutableList.of(DefaultDimensionSpec.of("dim1"), DefaultDimensionSpec.of("dim6"))
+          ),
+          ImmutableList.<String>of("0")
+      );
+
+      assertFilterMatches(
+          new ColumnComparisonDimFilter(
+              ImmutableList.of(DefaultDimensionSpec.of("dim2"), DefaultDimensionSpec.of("dim6"))
+          ),
+          ImmutableList.<String>of("1", "2", "6", "7", "8")
+      );
+    } else {
+      assertFilterMatches(
+          new ColumnComparisonDimFilter(
+              ImmutableList.of(DefaultDimensionSpec.of("dim1"), DefaultDimensionSpec.of("dim6"))
+          ),
+          ImmutableList.<String>of()
+      );
+
+      assertFilterMatches(
+          new ColumnComparisonDimFilter(
+              ImmutableList.of(DefaultDimensionSpec.of("dim2"), DefaultDimensionSpec.of("dim6"))
+          ),
+          ImmutableList.<String>of("1", "6", "7", "8")
+      );
+    }
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java
index b45cc9147a3..a9730d8dc11 100644
--- a/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import com.google.common.collect.Sets;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
@@ -122,8 +123,14 @@ public void testOneSingleValuedStringColumn()
     assertFilterMatches(EDF("dim3 == 1.0"), ImmutableList.of("3", "4", "6"));
     assertFilterMatches(EDF("dim3 == 1.234"), ImmutableList.of("9"));
     assertFilterMatches(EDF("dim3 < '2'"), ImmutableList.of("0", "1", "3", "4", "6", "9"));
-    assertFilterMatches(EDF("dim3 < 2"), ImmutableList.of("0", "3", "4", "6", "7", "9"));
-    assertFilterMatches(EDF("dim3 < 2.0"), ImmutableList.of("0", "3", "4", "6", "7", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("dim3 < 2"), ImmutableList.of("0", "3", "4", "6", "7", "9"));
+      assertFilterMatches(EDF("dim3 < 2.0"), ImmutableList.of("0", "3", "4", "6", "7", "9"));
+    } else {
+      // Empty String and "a" will not match
+      assertFilterMatches(EDF("dim3 < 2"), ImmutableList.of("3", "4", "6", "9"));
+      assertFilterMatches(EDF("dim3 < 2.0"), ImmutableList.of("3", "4", "6", "9"));
+    }
     assertFilterMatches(EDF("like(dim3, '1%')"), ImmutableList.of("1", "3", "4", "6", "9"));
   }
 
@@ -132,8 +139,13 @@ public void testOneMultiValuedStringColumn()
   {
     // Expressions currently treat multi-valued arrays as nulls.
     // This test is just documenting the current behavior, not necessarily saying it makes sense.
-
-    assertFilterMatches(EDF("dim4 == ''"), ImmutableList.of("0", "1", "2", "4", "5", "6", "7", "8"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("dim4 == ''"), ImmutableList.of("0", "1", "2", "4", "5", "6", "7", "8"));
+    } else {
+      assertFilterMatches(EDF("dim4 == ''"), ImmutableList.of("2"));
+      // As per the SQL standard, null == null returns false.
+      assertFilterMatches(EDF("dim4 == null"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("dim4 == '1'"), ImmutableList.of());
     assertFilterMatches(EDF("dim4 == '3'"), ImmutableList.of("3"));
   }
@@ -141,7 +153,12 @@ public void testOneMultiValuedStringColumn()
   @Test
   public void testOneLongColumn()
   {
-    assertFilterMatches(EDF("dim1 == ''"), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("dim1 == ''"), ImmutableList.of("0"));
+    } else {
+      // A long does not match empty string
+      assertFilterMatches(EDF("dim1 == ''"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("dim1 == '1'"), ImmutableList.of("1"));
     assertFilterMatches(EDF("dim1 == 2"), ImmutableList.of("2"));
     assertFilterMatches(EDF("dim1 < '2'"), ImmutableList.of("0", "1"));
@@ -153,7 +170,12 @@ public void testOneLongColumn()
   @Test
   public void testOneFloatColumn()
   {
-    assertFilterMatches(EDF("dim2 == ''"), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("dim2 == ''"), ImmutableList.of("0"));
+    } else {
+      // A float does not match empty string
+      assertFilterMatches(EDF("dim2 == ''"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("dim2 == '1'"), ImmutableList.of("1"));
     assertFilterMatches(EDF("dim2 == 2"), ImmutableList.of("2"));
     assertFilterMatches(EDF("dim2 < '2'"), ImmutableList.of("0", "1"));
@@ -175,11 +197,19 @@ public void testCompareColumns()
     // String vs string
     assertFilterMatches(EDF("dim0 == dim3"), ImmutableList.of("2", "5", "8"));
 
-    // String vs long
-    assertFilterMatches(EDF("dim1 == dim3"), ImmutableList.of("0", "2", "5", "8"));
+    if (NullHandling.replaceWithDefault()) {
+      // String vs long
+      assertFilterMatches(EDF("dim1 == dim3"), ImmutableList.of("0", "2", "5", "8"));
+
+      // String vs float
+      assertFilterMatches(EDF("dim2 == dim3"), ImmutableList.of("0", "2", "5", "8"));
+    } else {
+      // String vs long
+      assertFilterMatches(EDF("dim1 == dim3"), ImmutableList.of("2", "5", "8"));
 
-    // String vs float
-    assertFilterMatches(EDF("dim2 == dim3"), ImmutableList.of("0", "2", "5", "8"));
+      // String vs float
+      assertFilterMatches(EDF("dim2 == dim3"), ImmutableList.of("2", "5", "8"));
+    }
 
     // String vs. multi-value string
     // Expressions currently treat multi-valued arrays as nulls.
@@ -190,12 +220,25 @@ public void testCompareColumns()
   @Test
   public void testMissingColumn()
   {
-    assertFilterMatches(EDF("missing == ''"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("missing == ''"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    } else {
+      // As per the SQL standard, null == null returns false.
+      assertFilterMatches(EDF("missing == null"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("missing == '1'"), ImmutableList.of());
     assertFilterMatches(EDF("missing == 2"), ImmutableList.of());
-    assertFilterMatches(EDF("missing < '2'"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(EDF("missing < 2"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(EDF("missing < 2.0"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      // missing is equivalent to 0
+      assertFilterMatches(EDF("missing < '2'"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+      assertFilterMatches(EDF("missing < 2"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+      assertFilterMatches(EDF("missing < 2.0"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    } else {
+      // missing equivalent to null
+      assertFilterMatches(EDF("missing < '2'"), ImmutableList.of());
+      assertFilterMatches(EDF("missing < 2"), ImmutableList.of());
+      assertFilterMatches(EDF("missing < 2.0"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("missing > '2'"), ImmutableList.of());
     assertFilterMatches(EDF("missing > 2"), ImmutableList.of());
     assertFilterMatches(EDF("missing > 2.0"), ImmutableList.of());
diff --git a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java
index 2c07107b5a7..26289718961 100644
--- a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java
@@ -21,9 +21,9 @@
 
 import com.google.common.base.Function;
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -116,7 +116,7 @@ public Filter toFilter()
       if (extractionFn == null) {
         return new NoBitmapSelectorFilter(dimension, value);
       } else {
-        final String valueOrNull = Strings.emptyToNull(value);
+        final String valueOrNull = NullHandling.emptyToNullIfNeeded(value);
         final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
         {
           @Override
@@ -199,7 +199,11 @@ public static void tearDown() throws Exception
   @Test
   public void testSinglePreFilterWithNulls()
   {
-    assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of());
+    }
     assertFilterMatches(new SelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
     assertFilterMatches(new SelectorDimFilter("dim1", "10", null), ImmutableList.of("1"));
     assertFilterMatches(new SelectorDimFilter("dim1", "2", null), ImmutableList.of("2"));
@@ -212,7 +216,11 @@ public void testSinglePreFilterWithNulls()
   @Test
   public void testSinglePostFilterWithNulls()
   {
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", null, null), ImmutableList.of());
+    }
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "10", null), ImmutableList.of("1"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "2", null), ImmutableList.of("2"));
@@ -221,8 +229,11 @@ public void testSinglePostFilterWithNulls()
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "abc", null), ImmutableList.of("5", "8"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "ab", null), ImmutableList.of());
 
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0"));
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-", JS_EXTRACTION_FN), ImmutableList.of("0"));
+    }
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-10", JS_EXTRACTION_FN), ImmutableList.of("1"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN), ImmutableList.of("2"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN), ImmutableList.of("3", "9"));
@@ -234,13 +245,23 @@ public void testSinglePostFilterWithNulls()
   @Test
   public void testBasicPreAndPostFilterWithNulls()
   {
-    assertFilterMatches(
-        new AndDimFilter(Arrays.asList(
-            new SelectorDimFilter("dim2", "a", null),
-            new NoBitmapSelectorDimFilter("dim1", null, null)
-        )),
-        ImmutableList.of("0")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new AndDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim2", "a", null),
+              new NoBitmapSelectorDimFilter("dim1", null, null)
+          )),
+          ImmutableList.of("0")
+      );
+    } else {
+      assertFilterMatches(
+          new AndDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim2", "a", null),
+              new NoBitmapSelectorDimFilter("dim1", null, null)
+          )),
+          ImmutableList.of()
+      );
+    }
 
     assertFilterMatches(
         new AndDimFilter(Arrays.asList(
@@ -274,14 +295,51 @@ public void testBasicPreAndPostFilterWithNulls()
         ImmutableList.of()
     );
 
-
-    assertFilterMatches(
-        new AndDimFilter(Arrays.asList(
-            new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
-            new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN)
-        )),
-        ImmutableList.of("0")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new AndDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("0")
+      );
+      assertFilterMatches(
+          new AndDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("2")
+      );
+    } else {
+      assertFilterMatches(
+          new AndDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim1", "super-", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("0")
+      );
+      assertFilterMatches(
+          new AndDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of()
+      );
+      assertFilterMatches(
+          new AndDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("2")
+      );
+      assertFilterMatches(
+          new AndDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of()
+      );
+    }
 
     assertFilterMatches(
         new AndDimFilter(Arrays.asList(
@@ -291,14 +349,6 @@ public void testBasicPreAndPostFilterWithNulls()
         ImmutableList.of("1")
     );
 
-    assertFilterMatches(
-        new AndDimFilter(Arrays.asList(
-            new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
-            new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
-        )),
-        ImmutableList.of("2")
-    );
-
     assertFilterMatches(
         new AndDimFilter(Arrays.asList(
             new SelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN),
@@ -327,13 +377,23 @@ public void testOrPostFilterWithNulls()
         ImmutableList.of("0", "3")
     );
 
-    assertFilterMatches(
-        new OrDimFilter(Arrays.asList(
-            new SelectorDimFilter("dim1", "abc", null),
-            new NoBitmapSelectorDimFilter("dim2", null, null)
-        )),
-        ImmutableList.of("1", "2", "5", "8")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new OrDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim1", "abc", null),
+              new NoBitmapSelectorDimFilter("dim2", null, null)
+          )),
+          ImmutableList.of("1", "2", "5", "8")
+      );
+    } else {
+      assertFilterMatches(
+          new OrDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim1", "abc", null),
+              new NoBitmapSelectorDimFilter("dim2", null, null)
+          )),
+          ImmutableList.of("1", "5", "8")
+      );
+    }
 
     assertFilterMatches(
         new OrDimFilter(Arrays.asList(
@@ -382,13 +442,32 @@ public void testOrPostFilterWithNulls()
         )),
         ImmutableList.of("0", "3")
     );
-    assertFilterMatches(
-        new OrDimFilter(Arrays.asList(
-            new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN),
-            new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
-        )),
-        ImmutableList.of("1", "2", "5", "8")
-    );
+
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new OrDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("1", "2", "5", "8")
+      );
+    } else {
+      assertFilterMatches(
+          new OrDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("1", "5", "8")
+      );
+      assertFilterMatches(
+          new OrDimFilter(Arrays.asList(
+              new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("2", "5", "8")
+      );
+    }
+
     assertFilterMatches(
         new OrDimFilter(Arrays.asList(
             new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
@@ -432,7 +511,14 @@ public void testOrPostFilterWithNulls()
   public void testMissingColumnSpecifiedInDimensionList()
   {
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new NoBitmapSelectorDimFilter("dim3", "", null),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
+      );
+    } else {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "", null), ImmutableList.of());
+    }
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "a", null), ImmutableList.of());
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "b", null), ImmutableList.of());
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "c", null), ImmutableList.of());
@@ -482,7 +568,17 @@ public void testMissingColumnSpecifiedInDimensionList()
   public void testMissingColumnNotSpecifiedInDimensionList()
   {
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new NoBitmapSelectorDimFilter("dim4", "", null),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
+      );
+    } else {
+      assertFilterMatches(
+          new NoBitmapSelectorDimFilter("dim4", "", null),
+          ImmutableList.of()
+      );
+    }
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "a", null), ImmutableList.of());
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "b", null), ImmutableList.of());
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "c", null), ImmutableList.of());
diff --git a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java
index 510ec9da7bf..91c8692b16d 100644
--- a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -125,10 +126,17 @@ public void testSingleValueStringColumnWithNulls()
         ImmutableList.of("a")
     );
 
-    assertFilterMatches(
-        toInFilter("dim1", null, "10", "abc"),
-        ImmutableList.of("a", "b", "f")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          toInFilter("dim1", null, "10", "abc"),
+          ImmutableList.of("a", "b", "f")
+      );
+    } else {
+      assertFilterMatches(
+          toInFilter("dim1", null, "10", "abc"),
+          ImmutableList.of("b", "f")
+      );
+    }
 
     assertFilterMatches(
         toInFilter("dim1", "-1", "ab", "de"),
@@ -139,28 +147,47 @@ public void testSingleValueStringColumnWithNulls()
   @Test
   public void testMultiValueStringColumn()
   {
-    assertFilterMatches(
-        toInFilter("dim2", null),
-        ImmutableList.of("b", "c", "f")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          toInFilter("dim2", null),
+          ImmutableList.of("b", "c", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", null, "a"),
+          ImmutableList.of("a", "b", "c", "d", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", null, "b"),
+          ImmutableList.of("a", "b", "c", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", ""),
+          ImmutableList.of("b", "c", "f")
+      );
+    } else {
+      assertFilterMatches(
+          toInFilter("dim2", null),
+          ImmutableList.of("b", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", null, "a"),
+          ImmutableList.of("a", "b", "d", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", null, "b"),
+          ImmutableList.of("a", "b", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", ""),
+          ImmutableList.of("c")
+      );
+    }
 
     assertFilterMatches(
         toInFilter("dim2", "", (String) null),
         ImmutableList.of("b", "c", "f")
     );
 
-    assertFilterMatches(
-        toInFilter("dim2", null, "a"),
-        ImmutableList.of("a", "b", "c", "d", "f")
-
-    );
-
-    assertFilterMatches(
-        toInFilter("dim2", null, "b"),
-        ImmutableList.of("a", "b", "c", "f")
-
-    );
-
     assertFilterMatches(
         toInFilter("dim2", "c"),
         ImmutableList.of("e")
@@ -180,10 +207,17 @@ public void testMissingColumn()
         ImmutableList.of("a", "b", "c", "d", "e", "f")
     );
 
-    assertFilterMatches(
-        toInFilter("dim3", ""),
-        ImmutableList.of("a", "b", "c", "d", "e", "f")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          toInFilter("dim3", ""),
+          ImmutableList.of("a", "b", "c", "d", "e", "f")
+      );
+    } else {
+      assertFilterMatches(
+          toInFilter("dim3", ""),
+          ImmutableList.of()
+      );
+    }
 
     assertFilterMatches(
         toInFilter("dim3", null, "a"),
@@ -215,20 +249,43 @@ public void testMatchWithExtractionFn()
     String nullJsFn = "function(str) { if (str === null) { return 'YES'; } else { return 'NO';} }";
     ExtractionFn yesNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());
 
-    assertFilterMatches(
-        toInFilterWithFn("dim2", superFn, "super-null", "super-a", "super-b"),
-        ImmutableList.of("a", "b", "c", "d", "f")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          toInFilterWithFn("dim2", superFn, "super-null", "super-a", "super-b"),
+          ImmutableList.of("a", "b", "c", "d", "f")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim1", superFn, "super-null", "super-10", "super-def"),
+          ImmutableList.of("a", "b", "e")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim2", yesNullFn, "YES"),
+          ImmutableList.of("b", "c", "f")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim1", yesNullFn, "NO"),
+          ImmutableList.of("b", "c", "d", "e", "f")
+      );
+    } else {
+      assertFilterMatches(
+          toInFilterWithFn("dim2", superFn, "super-null", "super-a", "super-b"),
+          ImmutableList.of("a", "b", "d", "f")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim1", superFn, "super-null", "super-10", "super-def"),
+          ImmutableList.of("b", "e")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim2", yesNullFn, "YES"),
+          ImmutableList.of("b", "f")
+      );
+
+      assertFilterMatches(
+          toInFilterWithFn("dim1", yesNullFn, "NO"),
+          ImmutableList.of("a", "b", "c", "d", "e", "f")
+      );
+    }
 
-    assertFilterMatches(
-        toInFilterWithFn("dim2", yesNullFn, "YES"),
-        ImmutableList.of("b", "c", "f")
-    );
-
-    assertFilterMatches(
-        toInFilterWithFn("dim1", superFn, "super-null", "super-10", "super-def"),
-        ImmutableList.of("a", "b", "e")
-    );
 
     assertFilterMatches(
         toInFilterWithFn("dim3", yesNullFn, "NO"),
@@ -240,10 +297,6 @@ public void testMatchWithExtractionFn()
         ImmutableList.of("a", "b", "c", "d", "e", "f")
     );
 
-    assertFilterMatches(
-        toInFilterWithFn("dim1", yesNullFn, "NO"),
-        ImmutableList.of("b", "c", "d", "e", "f")
-    );
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java
index 0a3989e7780..a280eabbeab 100644
--- a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -109,7 +110,12 @@ public void testSingleValueStringColumnWithoutNulls()
   @Test
   public void testSingleValueStringColumnWithNulls()
   {
-    assertFilterMatches(newJavaScriptDimFilter("dim1", jsNullFilter, null), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(newJavaScriptDimFilter("dim1", jsNullFilter, null), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(newJavaScriptDimFilter("dim1", jsNullFilter, null), ImmutableList.of());
+      assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter(""), null), ImmutableList.of("0"));
+    }
     assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("10"), null), ImmutableList.of("1"));
     assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("2"), null), ImmutableList.of("2"));
     assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("1"), null), ImmutableList.of("3"));
@@ -122,7 +128,12 @@ public void testSingleValueStringColumnWithNulls()
   public void testMultiValueStringColumn()
   {
     // multi-val null......
-    assertFilterMatches(newJavaScriptDimFilter("dim2", jsNullFilter, null), ImmutableList.of("1", "2", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(newJavaScriptDimFilter("dim2", jsNullFilter, null), ImmutableList.of("1", "2", "5"));
+    } else {
+      assertFilterMatches(newJavaScriptDimFilter("dim2", jsNullFilter, null), ImmutableList.of("1", "5"));
+      assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter(""), null), ImmutableList.of("2"));
+    }
     assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("a"), null), ImmutableList.of("0", "3"));
     assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("b"), null), ImmutableList.of("0"));
     assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("c"), null), ImmutableList.of("4"));
diff --git a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java
index c26113cef28..9d02542c0f8 100644
--- a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -156,10 +157,17 @@ public void testMatchEmptyString()
   @Test
   public void testMatchEmptyStringWithExtractionFn()
   {
-    assertFilterMatches(
-        new LikeDimFilter("dim1", "", null, new SubstringDimExtractionFn(100, 1)),
-        ImmutableList.of("0", "1", "2", "3", "4", "5")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new LikeDimFilter("dim1", "", null, new SubstringDimExtractionFn(100, 1)),
+          ImmutableList.of("0", "1", "2", "3", "4", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new LikeDimFilter("dim1", "", null, new SubstringDimExtractionFn(100, 1)),
+          ImmutableList.of()
+      );
+    }
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java
index 1bbeb81c4ee..a269574a5e9 100644
--- a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -99,7 +100,11 @@ public void testSingleValueStringColumnWithoutNulls()
   public void testSingleValueStringColumnWithNulls()
   {
     // RegexFilter always returns false for null row values.
-    assertFilterMatches(new RegexDimFilter("dim1", ".*", null), ImmutableList.of("1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new RegexDimFilter("dim1", ".*", null), ImmutableList.of("1", "2", "3", "4", "5"));
+    } else {
+      assertFilterMatches(new RegexDimFilter("dim1", ".*", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    }
     assertFilterMatches(new RegexDimFilter("dim1", "10", null), ImmutableList.of("1"));
     assertFilterMatches(new RegexDimFilter("dim1", "2", null), ImmutableList.of("2"));
     assertFilterMatches(new RegexDimFilter("dim1", "1", null), ImmutableList.of("1", "3"));
@@ -111,7 +116,11 @@ public void testSingleValueStringColumnWithNulls()
   @Test
   public void testMultiValueStringColumn()
   {
-    assertFilterMatches(new RegexDimFilter("dim2", ".*", null), ImmutableList.of("0", "3", "4"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new RegexDimFilter("dim2", ".*", null), ImmutableList.of("0", "3", "4"));
+    } else {
+      assertFilterMatches(new RegexDimFilter("dim2", ".*", null), ImmutableList.of("0", "2", "3", "4"));
+    }
     assertFilterMatches(new RegexDimFilter("dim2", "a", null), ImmutableList.of("0", "3"));
     assertFilterMatches(new RegexDimFilter("dim2", "b", null), ImmutableList.of("0"));
     assertFilterMatches(new RegexDimFilter("dim2", "c", null), ImmutableList.of("4"));
@@ -141,11 +150,15 @@ public void testRegexWithExtractionFn()
   {
     String nullJsFn = "function(str) { if (str === null) { return 'NOT_NULL_ANYMORE'; } else { return str;} }";
     ExtractionFn changeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());
-
-    assertFilterMatches(new RegexDimFilter("dim1", ".*ANYMORE", changeNullFn), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new RegexDimFilter("dim1", ".*ANYMORE", changeNullFn), ImmutableList.of("0"));
+      assertFilterMatches(new RegexDimFilter("dim2", ".*ANYMORE", changeNullFn), ImmutableList.of("1", "2", "5"));
+    } else {
+      assertFilterMatches(new RegexDimFilter("dim1", ".*ANYMORE", changeNullFn), ImmutableList.of());
+      assertFilterMatches(new RegexDimFilter("dim2", ".*ANYMORE", changeNullFn), ImmutableList.of("1", "5"));
+    }
     assertFilterMatches(new RegexDimFilter("dim1", "ab.*", changeNullFn), ImmutableList.of("4", "5"));
 
-    assertFilterMatches(new RegexDimFilter("dim2", ".*ANYMORE", changeNullFn), ImmutableList.of("1", "2", "5"));
     assertFilterMatches(new RegexDimFilter("dim2", "a.*", changeNullFn), ImmutableList.of("0", "3"));
 
     assertFilterMatches(new RegexDimFilter("dim3", ".*ANYMORE", changeNullFn), ImmutableList.of("0", "1", "2", "3", "4", "5"));
diff --git a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java
index 40f36b9bca0..791f8e8f0a2 100644
--- a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -105,8 +106,18 @@ public void testSingleValueStringColumnWithoutNulls()
   @Test
   public void testSingleValueStringColumnWithNulls()
   {
-    // SearchQueryFilter always returns false for null row values.
-    assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue(""), null), ImmutableList.of("1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      // SearchQueryFilter always returns false for null row values.
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim1", specForValue(""), null),
+          ImmutableList.of("1", "2", "3", "4", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim1", specForValue(""), null),
+          ImmutableList.of("0", "1", "2", "3", "4", "5")
+      );
+    }
     assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("10"), null), ImmutableList.of("1"));
     assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("2"), null), ImmutableList.of("2"));
     assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("1"), null), ImmutableList.of("1", "3"));
@@ -118,7 +129,14 @@ public void testSingleValueStringColumnWithNulls()
   @Test
   public void testMultiValueStringColumn()
   {
-    assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue(""), null), ImmutableList.of("0", "3", "4"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue(""), null), ImmutableList.of("0", "3", "4"));
+    } else {
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim2", specForValue(""), null),
+          ImmutableList.of("0", "2", "3", "4")
+      );
+    }
     assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("a"), null), ImmutableList.of("0", "3"));
     assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("b"), null), ImmutableList.of("0"));
     assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("c"), null), ImmutableList.of("4"));
@@ -151,10 +169,33 @@ public void testSearchQueryWithExtractionFn()
     String nullJsFn = "function(str) { if (str === null) { return 'NOT_NULL_ANYMORE'; } else { return str;} }";
     ExtractionFn changeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());
 
-    assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim1", specForValue("ANYMORE"), changeNullFn),
+          ImmutableList.of("0")
+      );
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim2", specForValue("ANYMORE"), changeNullFn),
+          ImmutableList.of("1", "2", "5")
+      );
+
+    } else {
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim1", specForValue("ANYMORE"), changeNullFn),
+          ImmutableList.of()
+      );
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim2", specForValue("ANYMORE"), changeNullFn),
+          ImmutableList.of("1", "5")
+      );
+    }
+
+    assertFilterMatches(
+        new SearchQueryDimFilter("dim1", specForValue("ab"), changeNullFn),
+        ImmutableList.<String>of("4", "5")
+    );
     assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ab"), changeNullFn), ImmutableList.of("4", "5"));
 
-    assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("1", "2", "5"));
     assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("a"), changeNullFn), ImmutableList.of("0", "3"));
 
     assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("0", "1", "2", "3", "4", "5"));
diff --git a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java
index 98aa304cecf..00027e06588 100644
--- a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -124,8 +125,13 @@ public void testSingleValueStringColumnWithoutNulls()
   @Test
   public void testSingleValueStringColumnWithNulls()
   {
-    assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
-    assertFilterMatches(new SelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+      assertFilterMatches(new SelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of());
+      assertFilterMatches(new SelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
+    }
     assertFilterMatches(new SelectorDimFilter("dim1", "10", null), ImmutableList.of("1"));
     assertFilterMatches(new SelectorDimFilter("dim1", "2", null), ImmutableList.of("2"));
     assertFilterMatches(new SelectorDimFilter("dim1", "1", null), ImmutableList.of("3"));
@@ -137,8 +143,13 @@ public void testSingleValueStringColumnWithNulls()
   @Test
   public void testMultiValueStringColumn()
   {
-    assertFilterMatches(new SelectorDimFilter("dim2", null, null), ImmutableList.of("1", "2", "5"));
-    assertFilterMatches(new SelectorDimFilter("dim2", "", null), ImmutableList.of("1", "2", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim2", null, null), ImmutableList.of("1", "2", "5"));
+      assertFilterMatches(new SelectorDimFilter("dim2", "", null), ImmutableList.of("1", "2", "5"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim2", null, null), ImmutableList.of("1", "5"));
+      assertFilterMatches(new SelectorDimFilter("dim2", "", null), ImmutableList.of("2"));
+    }
     assertFilterMatches(new SelectorDimFilter("dim2", "a", null), ImmutableList.of("0", "3"));
     assertFilterMatches(new SelectorDimFilter("dim2", "b", null), ImmutableList.of("0"));
     assertFilterMatches(new SelectorDimFilter("dim2", "c", null), ImmutableList.of("4"));
@@ -149,7 +160,11 @@ public void testMultiValueStringColumn()
   public void testMissingColumnSpecifiedInDimensionList()
   {
     assertFilterMatches(new SelectorDimFilter("dim3", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
-    assertFilterMatches(new SelectorDimFilter("dim3", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim3", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim3", "", null), ImmutableList.of());
+    }
     assertFilterMatches(new SelectorDimFilter("dim3", "a", null), ImmutableList.of());
     assertFilterMatches(new SelectorDimFilter("dim3", "b", null), ImmutableList.of());
     assertFilterMatches(new SelectorDimFilter("dim3", "c", null), ImmutableList.of());
@@ -159,7 +174,11 @@ public void testMissingColumnSpecifiedInDimensionList()
   public void testMissingColumnNotSpecifiedInDimensionList()
   {
     assertFilterMatches(new SelectorDimFilter("dim4", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
-    assertFilterMatches(new SelectorDimFilter("dim4", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim4", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim4", "", null), ImmutableList.of());
+    }
     assertFilterMatches(new SelectorDimFilter("dim4", "a", null), ImmutableList.of());
     assertFilterMatches(new SelectorDimFilter("dim4", "b", null), ImmutableList.of());
     assertFilterMatches(new SelectorDimFilter("dim4", "c", null), ImmutableList.of());
@@ -211,7 +230,23 @@ public void testSelectorWithLookupExtractionFn()
     );
     LookupExtractor mapExtractor3 = new MapLookupExtractor(stringMap3, false);
     LookupExtractionFn lookupFn3 = new LookupExtractionFn(mapExtractor3, false, null, false, true);
-    assertFilterMatches(new SelectorDimFilter("dim0", null, lookupFn3), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      // Nulls and empty strings are considered equivalent
+      assertFilterMatches(
+          new SelectorDimFilter("dim0", null, lookupFn3),
+          ImmutableList.of("0", "1", "2", "3", "4", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new SelectorDimFilter("dim0", null, lookupFn3),
+          ImmutableList.of("0", "2", "3", "4", "5")
+      );
+      assertFilterMatches(
+          new SelectorDimFilter("dim0", "", lookupFn3),
+          ImmutableList.of("1")
+      );
+    }
+
 
     final Map<String, String> stringMap4 = ImmutableMap.of(
         "9", "4"
@@ -252,7 +287,12 @@ public void testSelectorWithLookupExtractionFn()
 
     assertFilterMatches(optFilter1, ImmutableList.of("0", "1", "2", "5"));
     assertFilterMatches(optFilter2, ImmutableList.of("2", "5"));
-    assertFilterMatches(optFilter3, ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      // Null and Empty strings are same
+      assertFilterMatches(optFilter3, ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    } else {
+      assertFilterMatches(optFilter3, ImmutableList.of("0", "2", "3", "4", "5"));
+    }
     assertFilterMatches(optFilter4, ImmutableList.of("5"));
     assertFilterMatches(optFilter5, ImmutableList.of());
     assertFilterMatches(optFilter6, ImmutableList.of("5"));
@@ -261,6 +301,20 @@ public void testSelectorWithLookupExtractionFn()
     // remove these when ExtractionDimFilter is removed.
     assertFilterMatches(new ExtractionDimFilter("dim1", "UNKNOWN", lookupFn, null), ImmutableList.of("0", "1", "2", "5"));
     assertFilterMatches(new ExtractionDimFilter("dim0", "5", lookupFn2, null), ImmutableList.of("2", "5"));
-    assertFilterMatches(new ExtractionDimFilter("dim0", null, lookupFn3, null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new ExtractionDimFilter("dim0", null, lookupFn3, null),
+          ImmutableList.of("0", "1", "2", "3", "4", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new ExtractionDimFilter("dim0", null, lookupFn3, null),
+          ImmutableList.of("0", "2", "3", "4", "5")
+      );
+      assertFilterMatches(
+          new ExtractionDimFilter("dim0", "", lookupFn3, null),
+          ImmutableList.of("1")
+      );
+    }
   }
 }
diff --git a/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java b/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java
index 67526c6e769..26c66ebba76 100644
--- a/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java
+++ b/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java
@@ -134,7 +134,11 @@ public void testLongSelector()
     final BaseLongColumnValueSelector selector = XPLUSY.makeColumnValueSelector("expr", COLUMN_SELECTOR_FACTORY);
 
     CURRENT_ROW.set(ROW0);
-    Assert.assertEquals(0L, selector.getLong());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0L, selector.getLong());
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
 
     CURRENT_ROW.set(ROW1);
     if (NullHandling.replaceWithDefault()) {
@@ -157,7 +161,11 @@ public void testLongSelectorUsingStringFunction()
     final BaseLongColumnValueSelector selector = ZCONCATX.makeColumnValueSelector("expr", COLUMN_SELECTOR_FACTORY);
 
     CURRENT_ROW.set(ROW0);
-    Assert.assertEquals(0L, selector.getLong());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0L, selector.getLong());
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
 
     CURRENT_ROW.set(ROW1);
     if (NullHandling.replaceWithDefault()) {
@@ -168,10 +176,18 @@ public void testLongSelectorUsingStringFunction()
     }
 
     CURRENT_ROW.set(ROW2);
-    Assert.assertEquals(0L, selector.getLong());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0L, selector.getLong());
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
 
     CURRENT_ROW.set(ROW3);
-    Assert.assertEquals(0L, selector.getLong());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0L, selector.getLong());
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
   }
 
   @Test
@@ -180,7 +196,11 @@ public void testFloatSelector()
     final BaseFloatColumnValueSelector selector = XPLUSY.makeColumnValueSelector("expr", COLUMN_SELECTOR_FACTORY);
 
     CURRENT_ROW.set(ROW0);
-    Assert.assertEquals(0.0f, selector.getFloat(), 0.0f);
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0.0f, selector.getFloat(), 0.0f);
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
 
     CURRENT_ROW.set(ROW1);
     if (NullHandling.replaceWithDefault()) {
diff --git a/server/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java b/server/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java
index 76d2dd990e3..e684f73d6dd 100644
--- a/server/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java
+++ b/server/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java
@@ -24,6 +24,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.filter.DimFilterUtils;
@@ -53,6 +54,7 @@
   private final boolean retainMissingValue;
 
   @JsonProperty
+  @Nullable
   private final String replaceMissingValueWith;
 
   @JsonProperty
@@ -77,7 +79,7 @@ public LookupDimensionSpec(
   {
     this.retainMissingValue = retainMissingValue;
     this.optimize = optimize == null ? true : optimize;
-    this.replaceMissingValueWith = Strings.emptyToNull(replaceMissingValueWith);
+    this.replaceMissingValueWith = NullHandling.emptyToNullIfNeeded(replaceMissingValueWith);
     this.dimension = Preconditions.checkNotNull(dimension, "dimension can not be Null");
     this.outputName = Preconditions.checkNotNull(outputName, "outputName can not be Null");
     this.lookupReferencesManager = lookupReferencesManager;
@@ -166,13 +168,13 @@ public boolean mustDecorate()
   @Override
   public byte[] getCacheKey()
   {
+
     byte[] dimensionBytes = StringUtils.toUtf8(dimension);
     byte[] dimExtractionFnBytes = Strings.isNullOrEmpty(name)
                                   ? getLookup().getCacheKey()
                                   : StringUtils.toUtf8(name);
     byte[] outputNameBytes = StringUtils.toUtf8(outputName);
-    byte[] replaceWithBytes = StringUtils.toUtf8(Strings.nullToEmpty(replaceMissingValueWith));
-
+    byte[] replaceWithBytes = StringUtils.toUtf8(StringUtils.nullToEmptyNonDruidDataString(replaceMissingValueWith));
 
     return ByteBuffer.allocate(6
                                + dimensionBytes.length
diff --git a/server/src/main/java/io/druid/query/expression/LookupExprMacro.java b/server/src/main/java/io/druid/query/expression/LookupExprMacro.java
index faf9dc2be6c..47098138cc8 100644
--- a/server/src/main/java/io/druid/query/expression/LookupExprMacro.java
+++ b/server/src/main/java/io/druid/query/expression/LookupExprMacro.java
@@ -19,8 +19,8 @@
 
 package io.druid.query.expression;
 
-import com.google.common.base.Strings;
 import com.google.inject.Inject;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.IAE;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.ExprEval;
@@ -77,7 +77,7 @@ public Expr apply(final List<Expr> args)
       @Override
       public ExprEval eval(final ObjectBinding bindings)
       {
-        return ExprEval.of(extractionFn.apply(Strings.emptyToNull(arg.eval(bindings).asString())));
+        return ExprEval.of(extractionFn.apply(NullHandling.emptyToNullIfNeeded(arg.eval(bindings).asString())));
       }
 
       @Override
diff --git a/server/src/main/java/io/druid/query/lookup/LookupModule.java b/server/src/main/java/io/druid/query/lookup/LookupModule.java
index 1e7f0cf4efb..b59e0b7e2a3 100644
--- a/server/src/main/java/io/druid/query/lookup/LookupModule.java
+++ b/server/src/main/java/io/druid/query/lookup/LookupModule.java
@@ -28,7 +28,6 @@
 import com.fasterxml.jackson.databind.jsontype.NamedType;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Binder;
@@ -48,6 +47,7 @@
 import io.druid.guice.annotations.Self;
 import io.druid.guice.annotations.Smile;
 import io.druid.initialization.DruidModule;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.query.dimension.LookupDimensionSpec;
 import io.druid.query.expression.LookupExprMacro;
@@ -259,8 +259,9 @@ public String getLookupTier()
         "Cannot specify both `lookupTier` and `lookupTierIsDatasource`"
     );
     final String lookupTier = lookupTierIsDatasource ? dataSourceTaskIdHolder.getDataSource() : this.lookupTier;
+
     return Preconditions.checkNotNull(
-        lookupTier == null ? DEFAULT_TIER : Strings.emptyToNull(lookupTier),
+        lookupTier == null ? DEFAULT_TIER : StringUtils.emptyToNullNonDruidDataString(lookupTier),
         "Cannot have empty lookup tier from %s",
         lookupTierIsDatasource ? "bound value" : LookupModule.PROPERTY_BASE
     );
diff --git a/server/src/main/java/io/druid/server/QueryLifecycle.java b/server/src/main/java/io/druid/server/QueryLifecycle.java
index 1d46630bda8..07d32906470 100644
--- a/server/src/main/java/io/druid/server/QueryLifecycle.java
+++ b/server/src/main/java/io/druid/server/QueryLifecycle.java
@@ -25,6 +25,7 @@
 import io.druid.client.DirectDruidClient;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.ISE;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.java.util.common.guava.SequenceWrapper;
 import io.druid.java.util.common.guava.Sequences;
@@ -285,11 +286,12 @@ public void emitLogsAndMetrics(
 
     try {
       final long queryTimeNs = System.nanoTime() - startNs;
+
       QueryMetrics queryMetrics = DruidMetrics.makeRequestMetrics(
           queryMetricsFactory,
           toolChest,
           baseQuery,
-          Strings.nullToEmpty(remoteAddress)
+          StringUtils.nullToEmptyNonDruidDataString(remoteAddress)
       );
       queryMetrics.success(success);
       queryMetrics.reportQueryTime(queryTimeNs);
@@ -323,11 +325,10 @@ public void emitLogsAndMetrics(
           statsMap.put("reason", e.toString());
         }
       }
-
       requestLogger.log(
           new RequestLogLine(
               DateTimes.utc(startMs),
-              Strings.nullToEmpty(remoteAddress),
+              StringUtils.nullToEmptyNonDruidDataString(remoteAddress),
               baseQuery,
               new QueryStats(statsMap)
           )
diff --git a/server/src/main/java/io/druid/server/emitter/EmitterModule.java b/server/src/main/java/io/druid/server/emitter/EmitterModule.java
index 249ea86417e..2f55f8def5e 100644
--- a/server/src/main/java/io/druid/server/emitter/EmitterModule.java
+++ b/server/src/main/java/io/druid/server/emitter/EmitterModule.java
@@ -19,7 +19,6 @@
 
 package io.druid.server.emitter;
 
-import com.google.common.base.Strings;
 import com.google.common.base.Supplier;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
@@ -34,14 +33,15 @@
 import com.google.inject.multibindings.MapBinder;
 import com.google.inject.name.Named;
 import com.google.inject.name.Names;
-import io.druid.java.util.emitter.EmittingLogger;
-import io.druid.java.util.emitter.core.Emitter;
-import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.guice.LazySingleton;
 import io.druid.guice.ManageLifecycle;
 import io.druid.guice.annotations.Self;
 import io.druid.java.util.common.ISE;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.logger.Logger;
+import io.druid.java.util.emitter.EmittingLogger;
+import io.druid.java.util.emitter.core.Emitter;
+import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.server.DruidNode;
 
 import java.lang.annotation.Annotation;
@@ -88,7 +88,7 @@ public void configure(Binder binder)
     String version = getClass().getPackage().getImplementationVersion();
     extraServiceDimensions
         .addBinding("version")
-        .toInstance(Strings.nullToEmpty(version)); // Version is null during `mvn test`.
+        .toInstance(StringUtils.nullToEmptyNonDruidDataString(version)); // Version is null during `mvn test`.
   }
 
   @Provides
diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java b/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java
index da6146af270..f74ae2ffabb 100644
--- a/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java
+++ b/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java
@@ -22,8 +22,8 @@
 import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.inject.Inject;
+import io.druid.java.util.common.StringUtils;
 import io.druid.server.initialization.ZkPathsConfig;
 import org.apache.curator.utils.ZKPaths;
 
@@ -92,8 +92,9 @@ public String toString()
   public String getAnnouncementPath(String listenerName)
   {
     return ZKPaths.makePath(
-        getListenersPath(), Preconditions.checkNotNull(
-            Strings.emptyToNull(listenerName), "Listener name cannot be null"
+        getListenersPath(),
+        Preconditions.checkNotNull(
+            StringUtils.emptyToNullNonDruidDataString(listenerName), "Listener name cannot be null"
         )
     );
   }
diff --git a/server/src/test/java/io/druid/query/dimension/LookupDimensionSpecTest.java b/server/src/test/java/io/druid/query/dimension/LookupDimensionSpecTest.java
index 5bb0e1b9b47..7fdf11296b5 100644
--- a/server/src/test/java/io/druid/query/dimension/LookupDimensionSpecTest.java
+++ b/server/src/test/java/io/druid/query/dimension/LookupDimensionSpecTest.java
@@ -22,8 +22,8 @@
 import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.jsontype.NamedType;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.extraction.MapLookupExtractor;
@@ -31,6 +31,7 @@
 import io.druid.query.lookup.LookupExtractorFactoryContainer;
 import io.druid.query.lookup.LookupReferencesManager;
 import io.druid.query.lookup.MapLookupExtractorFactory;
+import io.druid.segment.TestHelper;
 import junitparams.JUnitParamsRunner;
 import junitparams.Parameters;
 import org.easymock.EasyMock;
@@ -127,11 +128,11 @@ public void testGetOutputName()
         },
         new Object[]{
             new LookupDimensionSpec("dimName", "outputName", MAP_LOOKUP_EXTRACTOR, false, null, null, null, true),
-            ImmutableMap.of("not there", "")
+            TestHelper.createExpectedMap("not there", null)
         },
         new Object[]{
             new LookupDimensionSpec("dimName", "outputName", null, false, null, "lookupName", LOOKUP_REF_MANAGER, true),
-            ImmutableMap.of("not there", "")
+            TestHelper.createExpectedMap("not there", null)
         },
         new Object[]{
             new LookupDimensionSpec("dimName", "outputName", MAP_LOOKUP_EXTRACTOR, false, "Missing_value", null, null,
@@ -162,7 +163,10 @@ public void testGetOutputName()
   public void testApply(DimensionSpec dimensionSpec, Map<String, String> map)
   {
     for (Map.Entry<String, String> entry : map.entrySet()) {
-      Assert.assertEquals(Strings.emptyToNull(entry.getValue()), dimensionSpec.getExtractionFn().apply(entry.getKey()));
+      Assert.assertEquals(
+          NullHandling.emptyToNullIfNeeded(entry.getValue()),
+          dimensionSpec.getExtractionFn().apply(entry.getKey())
+      );
     }
   }
 
diff --git a/server/src/test/java/io/druid/query/expression/ExprMacroTest.java b/server/src/test/java/io/druid/query/expression/ExprMacroTest.java
index 414f32d99bd..4ba3cc3ef4c 100644
--- a/server/src/test/java/io/druid/query/expression/ExprMacroTest.java
+++ b/server/src/test/java/io/druid/query/expression/ExprMacroTest.java
@@ -20,6 +20,7 @@
 package io.druid.query.expression;
 
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.Parser;
@@ -84,8 +85,8 @@ public void testRegexpExtract()
   public void testTimestampCeil()
   {
     assertExpr("timestamp_ceil(t, 'P1M')", DateTimes.of("2000-03-01").getMillis());
-    assertExpr("timestamp_ceil(t, 'P1D','','America/Los_Angeles')", DateTimes.of("2000-02-03T08").getMillis());
-    assertExpr("timestamp_ceil(t, 'P1D','',CityOfAngels)", DateTimes.of("2000-02-03T08").getMillis());
+    assertExpr("timestamp_ceil(t, 'P1D',null,'America/Los_Angeles')", DateTimes.of("2000-02-03T08").getMillis());
+    assertExpr("timestamp_ceil(t, 'P1D',null,CityOfAngels)", DateTimes.of("2000-02-03T08").getMillis());
     assertExpr("timestamp_ceil(t, 'P1D','1970-01-01T01','Etc/UTC')", DateTimes.of("2000-02-04T01").getMillis());
   }
 
@@ -93,8 +94,8 @@ public void testTimestampCeil()
   public void testTimestampFloor()
   {
     assertExpr("timestamp_floor(t, 'P1M')", DateTimes.of("2000-02-01").getMillis());
-    assertExpr("timestamp_floor(t, 'P1D','','America/Los_Angeles')", DateTimes.of("2000-02-02T08").getMillis());
-    assertExpr("timestamp_floor(t, 'P1D','',CityOfAngels)", DateTimes.of("2000-02-02T08").getMillis());
+    assertExpr("timestamp_floor(t, 'P1D',null,'America/Los_Angeles')", DateTimes.of("2000-02-02T08").getMillis());
+    assertExpr("timestamp_floor(t, 'P1D',null,CityOfAngels)", DateTimes.of("2000-02-02T08").getMillis());
     assertExpr("timestamp_floor(t, 'P1D','1970-01-01T01','Etc/UTC')", DateTimes.of("2000-02-03T01").getMillis());
   }
 
@@ -122,12 +123,12 @@ public void testTimestampParse()
     assertExpr("timestamp_parse(tstr)", DateTimes.of("2000-02-03T04:05:06").getMillis());
     assertExpr("timestamp_parse(tstr_sql)", DateTimes.of("2000-02-03T04:05:06").getMillis());
     assertExpr(
-        "timestamp_parse(tstr_sql,'','America/Los_Angeles')",
+        "timestamp_parse(tstr_sql,null,'America/Los_Angeles')",
         DateTimes.of("2000-02-03T04:05:06-08:00").getMillis()
     );
     assertExpr("timestamp_parse('2000-02-03')", DateTimes.of("2000-02-03").getMillis());
     assertExpr("timestamp_parse('2000-02')", DateTimes.of("2000-02-01").getMillis());
-    assertExpr("timestamp_parse('')", null);
+    assertExpr("timestamp_parse(null)", null);
     assertExpr("timestamp_parse('z2000')", null);
     assertExpr("timestamp_parse(tstr_sql,'yyyy-MM-dd HH:mm:ss')", DateTimes.of("2000-02-03T04:05:06").getMillis());
     assertExpr("timestamp_parse('02/03/2000','MM/dd/yyyy')", DateTimes.of("2000-02-03").getMillis());
@@ -148,36 +149,39 @@ public void testTimestampFormat()
   @Test
   public void testTrim()
   {
-    assertExpr("trim('')", null);
+    String emptyString = NullHandling.replaceWithDefault() ? null : "";
+    assertExpr("trim('')", emptyString);
     assertExpr("trim(concat(' ',x,' '))", "foo");
     assertExpr("trim(spacey)", "hey there");
     assertExpr("trim(spacey, '')", "  hey there  ");
     assertExpr("trim(spacey, 'he ')", "y ther");
-    assertExpr("trim(spacey, spacey)", null);
+    assertExpr("trim(spacey, spacey)", emptyString);
     assertExpr("trim(spacey, substring(spacey, 0, 4))", "y ther");
   }
 
   @Test
   public void testLTrim()
   {
-    assertExpr("ltrim('')", null);
+    String emptyString = NullHandling.replaceWithDefault() ? null : "";
+    assertExpr("ltrim('')", emptyString);
     assertExpr("ltrim(concat(' ',x,' '))", "foo ");
     assertExpr("ltrim(spacey)", "hey there  ");
     assertExpr("ltrim(spacey, '')", "  hey there  ");
     assertExpr("ltrim(spacey, 'he ')", "y there  ");
-    assertExpr("ltrim(spacey, spacey)", null);
+    assertExpr("ltrim(spacey, spacey)", emptyString);
     assertExpr("ltrim(spacey, substring(spacey, 0, 4))", "y there  ");
   }
 
   @Test
   public void testRTrim()
   {
-    assertExpr("rtrim('')", null);
+    String emptyString = NullHandling.replaceWithDefault() ? null : "";
+    assertExpr("rtrim('')", emptyString);
     assertExpr("rtrim(concat(' ',x,' '))", " foo");
     assertExpr("rtrim(spacey)", "  hey there");
     assertExpr("rtrim(spacey, '')", "  hey there  ");
     assertExpr("rtrim(spacey, 'he ')", "  hey ther");
-    assertExpr("rtrim(spacey, spacey)", null);
+    assertExpr("rtrim(spacey, spacey)", emptyString);
     assertExpr("rtrim(spacey, substring(spacey, 0, 4))", "  hey ther");
   }
 
diff --git a/server/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java b/server/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java
index 8b5288f01f6..7b899207ae2 100644
--- a/server/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java
+++ b/server/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java
@@ -66,7 +66,7 @@ public void testSimpleDelegation()
     Assert.assertEquals(false, fn.isInjective());
     Assert.assertEquals(ExtractionFn.ExtractionType.MANY_TO_ONE, fn.getExtractionType());
 
-    for (String orig : Arrays.asList("", "foo", "bat")) {
+    for (String orig : Arrays.asList(null, "foo", "bat")) {
       Assert.assertEquals(LOOKUP_EXTRACTOR.apply(orig), fn.apply(orig));
     }
     Assert.assertEquals("not in the map", fn.apply("not in the map"));
diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java
index a3046957bcb..88e2f940766 100644
--- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java
+++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java
@@ -25,6 +25,7 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Committer;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
@@ -81,21 +82,24 @@ public void testSimpleIngestion() throws Exception
 
       // add
       commitMetadata.put("x", "1");
-      Assert.assertEquals(1,
-                          appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier)
-                                      .getNumRowsInSegment()
+      Assert.assertEquals(
+          1,
+          appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier)
+                      .getNumRowsInSegment()
       );
 
       commitMetadata.put("x", "2");
-      Assert.assertEquals(2,
-                          appenderator.add(IDENTIFIERS.get(0), IR("2000", "bar", 2), committerSupplier)
-                                      .getNumRowsInSegment()
+      Assert.assertEquals(
+          2,
+          appenderator.add(IDENTIFIERS.get(0), IR("2000", "bar", 2), committerSupplier)
+                      .getNumRowsInSegment()
       );
 
       commitMetadata.put("x", "3");
-      Assert.assertEquals(1,
-                          appenderator.add(IDENTIFIERS.get(1), IR("2000", "qux", 4), committerSupplier)
-                                      .getNumRowsInSegment()
+      Assert.assertEquals(
+          1,
+          appenderator.add(IDENTIFIERS.get(1), IR("2000", "qux", 4), committerSupplier)
+                      .getNumRowsInSegment()
       );
 
       // getSegments
@@ -171,10 +175,17 @@ public void run()
 
       appenderator.startJob();
       appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier);
-      //expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 10 (dimsKeySize) = 138
-      Assert.assertEquals(138, ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0)));
+      //expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 10 (dimsKeySize) = 138 + 1 byte when null handling is enabled
+      int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0;
+      Assert.assertEquals(
+          138 + nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0))
+      );
       appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier);
-      Assert.assertEquals(138, ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(1)));
+      Assert.assertEquals(
+          138 + nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(1))
+      );
       appenderator.close();
       Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
     }
@@ -208,9 +219,13 @@ public void run()
       appenderator.startJob();
       appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier);
       //expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 10 (dimsKeySize) = 138
-      Assert.assertEquals(138, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory());
+      int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0;
+      Assert.assertEquals(138 + nullHandlingOverhead, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory());
       appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier);
-      Assert.assertEquals(276, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory());
+      Assert.assertEquals(
+          276 + 2 * nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory()
+      );
       appenderator.close();
       Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
     }
@@ -246,10 +261,17 @@ public void run()
       Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
       appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier);
       //we still calculate the size even when ignoring it to make persist decision
-      Assert.assertEquals(138, ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0)));
+      int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0;
+      Assert.assertEquals(
+          138 + nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0))
+      );
       Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory());
       appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier);
-      Assert.assertEquals(276, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory());
+      Assert.assertEquals(
+          276 + 2 * nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory()
+      );
       Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory());
       appenderator.close();
       Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
index 195772a9b8e..8d744e6814f 100644
--- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
+++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
@@ -68,9 +68,6 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-//CHECKSTYLE.OFF: Regexp
-//CHECKSTYLE.ON: Regexp
-
 /**
  */
 public class LoadRuleTest
diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java b/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java
index 3b4722f5ebb..898d5300f6d 100644
--- a/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java
+++ b/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java
@@ -90,7 +90,7 @@ public static String stringLiteral(final String s)
 
   public static String nullLiteral()
   {
-    return "''";
+    return "null";
   }
 
   public static String functionCall(final String functionName, final List<DruidExpression> args)
diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java
index a7dcfd7bc75..63dec1979de 100644
--- a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java
+++ b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java
@@ -33,14 +33,15 @@
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.extraction.TimeFormatExtractionFn;
 import io.druid.query.filter.AndDimFilter;
-import io.druid.query.filter.BoundDimFilter;
 import io.druid.query.filter.DimFilter;
 import io.druid.query.filter.ExpressionDimFilter;
 import io.druid.query.filter.LikeDimFilter;
 import io.druid.query.filter.NotDimFilter;
 import io.druid.query.filter.OrDimFilter;
+import io.druid.query.filter.SelectorDimFilter;
 import io.druid.query.ordering.StringComparator;
 import io.druid.query.ordering.StringComparators;
+import io.druid.common.config.NullHandling;
 import io.druid.segment.column.Column;
 import io.druid.segment.column.ValueType;
 import io.druid.sql.calcite.filtration.BoundRefKey;
@@ -311,13 +312,10 @@ private static DimFilter toSimpleLeafFilter(
         return null;
       }
 
-      final BoundDimFilter equalFilter = Bounds.equalTo(
-          new BoundRefKey(
-              druidExpression.getSimpleExtraction().getColumn(),
-              druidExpression.getSimpleExtraction().getExtractionFn(),
-              StringComparators.LEXICOGRAPHIC
-          ),
-          ""
+      final DimFilter equalFilter = new SelectorDimFilter(
+          druidExpression.getSimpleExtraction().getColumn(),
+          NullHandling.defaultStringValue(),
+          druidExpression.getSimpleExtraction().getExtractionFn()
       );
 
       return kind == SqlKind.IS_NOT_NULL ? new NotDimFilter(equalFilter) : equalFilter;
diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/UnaryFunctionOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/UnaryFunctionOperatorConversion.java
new file mode 100644
index 00000000000..d946b534577
--- /dev/null
+++ b/sql/src/main/java/io/druid/sql/calcite/expression/UnaryFunctionOperatorConversion.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.sql.calcite.expression;
+
+import com.google.common.collect.Iterables;
+import io.druid.java.util.common.StringUtils;
+import io.druid.sql.calcite.planner.PlannerContext;
+import io.druid.sql.calcite.table.RowSignature;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlOperator;
+
+public class UnaryFunctionOperatorConversion implements SqlOperatorConversion
+{
+  private final SqlOperator operator;
+  private final String druidOperator;
+
+  public UnaryFunctionOperatorConversion(final SqlOperator operator, final String druidOperator)
+  {
+    this.operator = operator;
+    this.druidOperator = druidOperator;
+  }
+
+  @Override
+  public SqlOperator calciteOperator()
+  {
+    return operator;
+  }
+
+  @Override
+  public DruidExpression toDruidExpression(
+      final PlannerContext plannerContext,
+      final RowSignature rowSignature,
+      final RexNode rexNode
+  )
+  {
+    return OperatorConversions.convertCall(
+        plannerContext,
+        rowSignature,
+        rexNode,
+        operands -> DruidExpression.fromExpression(
+            StringUtils.format(
+                "%s(%s)",
+                druidOperator,
+                Iterables.getOnlyElement(operands).getExpression()
+            )
+        )
+    );
+  }
+}
diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/builtin/CeilOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/builtin/CeilOperatorConversion.java
index 6ba5b2b8803..d8c556a41af 100644
--- a/sql/src/main/java/io/druid/sql/calcite/expression/builtin/CeilOperatorConversion.java
+++ b/sql/src/main/java/io/druid/sql/calcite/expression/builtin/CeilOperatorConversion.java
@@ -19,7 +19,6 @@
 
 package io.druid.sql.calcite.expression.builtin;
 
-import com.google.common.collect.ImmutableList;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.granularity.PeriodGranularity;
 import io.druid.sql.calcite.expression.DruidExpression;
@@ -35,6 +34,7 @@
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.calcite.sql.fun.SqlStdOperatorTable;
 
+import java.util.Arrays;
 import java.util.stream.Collectors;
 
 public class CeilOperatorConversion implements SqlOperatorConversion
@@ -80,7 +80,7 @@ public DruidExpression toDruidExpression(
       // So there is no simple extraction for this operator.
       return DruidExpression.fromFunctionCall(
           "timestamp_ceil",
-          ImmutableList.of(
+          Arrays.asList(
               druidExpression.getExpression(),
               DruidExpression.stringLiteral(granularity.getPeriod().toString()),
               DruidExpression.numberLiteral(
diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java
index b365034decc..e59c554027a 100644
--- a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java
+++ b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java
@@ -19,6 +19,7 @@
 
 package io.druid.sql.calcite.planner;
 
+import com.google.common.base.Preconditions;
 import com.google.common.io.BaseEncoding;
 import com.google.common.primitives.Chars;
 import io.druid.java.util.common.DateTimes;
@@ -107,26 +108,24 @@ public static SchemaPlus createRootSchema(final Schema druidSchema, final Author
 
   public static String escapeStringLiteral(final String s)
   {
-    if (s == null) {
-      return "''";
-    } else {
-      boolean isPlainAscii = true;
-      final StringBuilder builder = new StringBuilder("'");
-      for (int i = 0; i < s.length(); i++) {
-        final char c = s.charAt(i);
-        if (Character.isLetterOrDigit(c) || c == ' ') {
-          builder.append(c);
-          if (c > 127) {
-            isPlainAscii = false;
-          }
-        } else {
-          builder.append("\\").append(BaseEncoding.base16().encode(Chars.toByteArray(c)));
+    Preconditions.checkNotNull(s);
+    boolean isPlainAscii = true;
+    final StringBuilder builder = new StringBuilder("'");
+    for (int i = 0; i < s.length(); i++) {
+      final char c = s.charAt(i);
+      if (Character.isLetterOrDigit(c) || c == ' ') {
+        builder.append(c);
+        if (c > 127) {
           isPlainAscii = false;
         }
+      } else {
+        builder.append("\\").append(BaseEncoding.base16().encode(Chars.toByteArray(c)));
+        isPlainAscii = false;
       }
-      builder.append("'");
-      return isPlainAscii ? builder.toString() : "U&" + builder.toString();
     }
+    builder.append("'");
+    return isPlainAscii ? builder.toString() : "U&" + builder.toString();
+
   }
 
   public static ValueType getValueTypeForSqlTypeName(SqlTypeName sqlTypeName)
diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java b/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java
index 9ebc87bf659..3ecd62e7d88 100644
--- a/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java
+++ b/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java
@@ -37,6 +37,7 @@
 import io.druid.sql.calcite.expression.BinaryOperatorConversion;
 import io.druid.sql.calcite.expression.DirectOperatorConversion;
 import io.druid.sql.calcite.expression.SqlOperatorConversion;
+import io.druid.sql.calcite.expression.UnaryFunctionOperatorConversion;
 import io.druid.sql.calcite.expression.UnaryPrefixOperatorConversion;
 import io.druid.sql.calcite.expression.UnarySuffixOperatorConversion;
 import io.druid.sql.calcite.expression.builtin.BTrimOperatorConversion;
@@ -119,8 +120,8 @@
           .add(new DirectOperatorConversion(SqlStdOperatorTable.UPPER, "upper"))
           .add(new UnaryPrefixOperatorConversion(SqlStdOperatorTable.NOT, "!"))
           .add(new UnaryPrefixOperatorConversion(SqlStdOperatorTable.UNARY_MINUS, "-"))
-          .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_NULL, "== ''"))
-          .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_NOT_NULL, "!= ''"))
+          .add(new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NULL, "isnull"))
+          .add(new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NOT_NULL, "notnull"))
           .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_FALSE, "<= 0")) // Matches Evals.asBoolean
           .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_NOT_TRUE, "<= 0")) // Matches Evals.asBoolean
           .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_TRUE, "> 0")) // Matches Evals.asBoolean
diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java b/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java
index 16692e765e5..dc1ed23d4ae 100644
--- a/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java
+++ b/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java
@@ -84,7 +84,10 @@ public void reduce(
         if (sqlTypeName == SqlTypeName.BOOLEAN) {
           literal = rexBuilder.makeLiteral(exprResult.asBoolean(), constExp.getType(), true);
         } else if (sqlTypeName == SqlTypeName.DATE) {
-          if (!constExp.getType().isNullable() && exprResult.isNull()) {
+          // It is possible for an expression to have a non-null String value but it can return null when parsed
+          // as a primitive long/float/double.
+          // ExprEval.isNumericNull checks whether the parsed primitive value is null or not.
+          if (!constExp.getType().isNullable() && exprResult.isNumericNull()) {
             throw new IAE("Illegal DATE constant: %s", constExp);
           }
 
@@ -95,7 +98,10 @@ public void reduce(
               )
           );
         } else if (sqlTypeName == SqlTypeName.TIMESTAMP) {
-          if (!constExp.getType().isNullable() && exprResult.isNull()) {
+          // It is possible for an expression to have a non-null String value but it can return null when parsed
+          // as a primitive long/float/double.
+          // ExprEval.isNumericNull checks whether the parsed primitive value is null or not.
+          if (!constExp.getType().isNullable() && exprResult.isNumericNull()) {
             throw new IAE("Illegal TIMESTAMP constant: %s", constExp);
           }
 
diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java
index f1b668d7689..65fe55f9545 100644
--- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java
+++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java
@@ -28,6 +28,7 @@
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.java.util.common.guava.Sequences;
 import io.druid.query.ResourceLimitExceededException;
+import io.druid.segment.DimensionHandlerUtils;
 import io.druid.sql.calcite.planner.PlannerContext;
 import org.apache.calcite.interpreter.BindableConvention;
 import org.apache.calcite.plan.RelOptCluster;
@@ -294,7 +295,11 @@ public RelOptCost computeSelfCost(final RelOptPlanner planner, final RelMetadata
 
             for (int i : rightKeys) {
               final Object value = row[i];
-              final String stringValue = value != null ? String.valueOf(value) : "";
+              if (value == null) {
+                // NULLs are not supposed to match NULLs in a join. So ignore them.
+                continue;
+              }
+              final String stringValue = DimensionHandlerUtils.convertObjectToString(value);
               values.add(stringValue);
               if (values.size() > maxSemiJoinRowsInMemory) {
                 throw new ResourceLimitExceededException(
@@ -308,16 +313,18 @@ public RelOptCost computeSelfCost(final RelOptPlanner planner, final RelMetadata
 
               for (int i = 0; i < values.size(); i++) {
                 final String value = values.get(i);
-                subConditions.add(
-                    getCluster().getRexBuilder().makeCall(
-                        SqlStdOperatorTable.EQUALS,
-                        leftExpressions.get(i),
-                        getCluster().getRexBuilder().makeLiteral(value)
-                    )
-                );
+                // NULLs are not supposed to match NULLs in a join. So ignore them.
+                if (value != null) {
+                  subConditions.add(
+                      getCluster().getRexBuilder().makeCall(
+                          SqlStdOperatorTable.EQUALS,
+                          leftExpressions.get(i),
+                          getCluster().getRexBuilder().makeLiteral(value)
+                      )
+                  );
+                }
+                theConditions.add(makeAnd(subConditions));
               }
-
-              theConditions.add(makeAnd(subConditions));
             }
             return theConditions;
           }
diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java
index b8315f31876..205d2be7f7d 100644
--- a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java
+++ b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java
@@ -22,7 +22,6 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import com.google.common.primitives.Ints;
@@ -46,6 +45,7 @@
 import io.druid.query.topn.TopNQuery;
 import io.druid.query.topn.TopNResultValue;
 import io.druid.segment.DimensionHandlerUtils;
+import io.druid.common.config.NullHandling;
 import io.druid.segment.column.Column;
 import io.druid.server.QueryLifecycleFactory;
 import io.druid.server.security.AuthenticationResult;
@@ -400,7 +400,7 @@ private Object coerce(final Object value, final SqlTypeName sqlType)
 
     if (SqlTypeName.CHAR_TYPES.contains(sqlType)) {
       if (value == null || value instanceof String) {
-        coercedValue = Strings.nullToEmpty((String) value);
+        coercedValue = NullHandling.nullToEmptyIfNeeded((String) value);
       } else if (value instanceof NlsString) {
         coercedValue = ((NlsString) value).getValue();
       } else if (value instanceof Number) {
diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java
index 10b3e80cab2..50bd53e4ffa 100644
--- a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java
+++ b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.math.expr.ExprMacroTable;
 import io.druid.server.security.AllowAllAuthenticator;
@@ -139,11 +140,17 @@ public void testSelectAllInFirstFrame()
             true,
             Lists.newArrayList(
                 new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f},
-                new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f},
+                new Object[]{
+                    DateTimes.of("2000-01-02").getMillis(),
+                    1L,
+                    "10.1",
+                    NullHandling.defaultStringValue(),
+                    2.0f
+                },
                 new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f},
                 new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f},
                 new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f},
-                new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", "", 6.0f}
+                new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", NullHandling.defaultStringValue(), 6.0f}
             )
         ),
         frame
@@ -166,7 +173,13 @@ public void testSelectSplitOverTwoFrames()
             false,
             Lists.newArrayList(
                 new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f},
-                new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f}
+                new Object[]{
+                    DateTimes.of("2000-01-02").getMillis(),
+                    1L,
+                    "10.1",
+                    NullHandling.defaultStringValue(),
+                    2.0f
+                }
             )
         ),
         frame
@@ -183,7 +196,7 @@ public void testSelectSplitOverTwoFrames()
                 new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f},
                 new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f},
                 new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f},
-                new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", "", 6.0f}
+                new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", NullHandling.defaultStringValue(), 6.0f}
             )
         ),
         frame
diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java
index 0fd471edaef..c37a8252dd2 100644
--- a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HLLCV1;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
@@ -87,6 +88,7 @@
 import io.druid.segment.virtual.ExpressionVirtualColumn;
 import io.druid.server.security.AuthenticationResult;
 import io.druid.server.security.ForbiddenException;
+import io.druid.sql.calcite.expression.DruidExpression;
 import io.druid.sql.calcite.filtration.Filtration;
 import io.druid.sql.calcite.planner.Calcites;
 import io.druid.sql.calcite.planner.DruidOperatorTable;
@@ -126,6 +128,7 @@
 
 public class CalciteQueryTest extends CalciteTestBase
 {
+
   private static final Logger log = new Logger(CalciteQueryTest.class);
 
   private static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig();
@@ -310,7 +313,7 @@ public void testSelectCountStart() throws Exception
                                .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS)
                                .build()),
         ImmutableList.of(
-            new Object[]{11.0, 0.0}
+            new Object[]{11.0, NullHandling.defaultDoubleValue()}
         )
     );
 
@@ -333,7 +336,7 @@ public void testSelectCountStart() throws Exception
                                .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS)
                                .build()),
         ImmutableList.of(
-            new Object[]{11.0, 0.0}
+            new Object[]{11.0, NullHandling.defaultDoubleValue()}
         )
     );
 
@@ -552,6 +555,7 @@ public void testExplainInformationSchemaColumns() throws Exception
   @Test
   public void testSelectStar() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT * FROM druid.foo",
         ImmutableList.of(
@@ -565,11 +569,11 @@ public void testSelectStar() throws Exception
         ),
         ImmutableList.of(
             new Object[]{T("2000-01-01"), 1L, "", "a", 1f, 1.0, HLLCV1.class.getName()},
-            new Object[]{T("2000-01-02"), 1L, "10.1", "", 2f, 2.0, HLLCV1.class.getName()},
+            new Object[]{T("2000-01-02"), 1L, "10.1", nullValue, 2f, 2.0, HLLCV1.class.getName()},
             new Object[]{T("2000-01-03"), 1L, "2", "", 3f, 3.0, HLLCV1.class.getName()},
             new Object[]{T("2001-01-01"), 1L, "1", "a", 4f, 4.0, HLLCV1.class.getName()},
             new Object[]{T("2001-01-02"), 1L, "def", "abc", 5f, 5.0, HLLCV1.class.getName()},
-            new Object[]{T("2001-01-03"), 1L, "abc", "", 6f, 6.0, HLLCV1.class.getName()}
+            new Object[]{T("2001-01-03"), 1L, "abc", nullValue, 6f, 6.0, HLLCV1.class.getName()}
         )
     );
   }
@@ -596,7 +600,15 @@ public void testSelectStarOnForbiddenTable() throws Exception
                 .build()
         ),
         ImmutableList.of(
-            new Object[]{T("2000-01-01"), 1L, "forbidden", "abcd", 9999.0f, 0.0, HLLCV1.class.getName()}
+            new Object[]{
+                T("2000-01-01"),
+                1L,
+                "forbidden",
+                "abcd",
+                9999.0f,
+                NullHandling.defaultDoubleValue(),
+                HLLCV1.class.getName()
+            }
         )
     );
   }
@@ -638,6 +650,8 @@ public void testExplainSelectStar() throws Exception
   @Test
   public void testSelectStarWithLimit() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
+
     testQuery(
         "SELECT * FROM druid.foo LIMIT 2",
         ImmutableList.of(
@@ -652,7 +666,7 @@ public void testSelectStarWithLimit() throws Exception
         ),
         ImmutableList.of(
             new Object[]{T("2000-01-01"), 1L, "", "a", 1.0f, 1.0, HLLCV1.class.getName()},
-            new Object[]{T("2000-01-02"), 1L, "10.1", "", 2.0f, 2.0, HLLCV1.class.getName()}
+            new Object[]{T("2000-01-02"), 1L, "10.1", nullValue, 2.0f, 2.0, HLLCV1.class.getName()}
         )
     );
   }
@@ -660,6 +674,7 @@ public void testSelectStarWithLimit() throws Exception
   @Test
   public void testSelectWithProjection() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT SUBSTRING(dim2, 1, 1) FROM druid.foo LIMIT 2",
         ImmutableList.of(
@@ -677,7 +692,7 @@ public void testSelectWithProjection() throws Exception
         ),
         ImmutableList.of(
             new Object[]{"a"},
-            new Object[]{""}
+            new Object[]{nullValue}
         )
     );
   }
@@ -685,6 +700,8 @@ public void testSelectWithProjection() throws Exception
   @Test
   public void testSelectStarWithLimitTimeDescending() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
+
     testQuery(
         "SELECT * FROM druid.foo ORDER BY __time DESC LIMIT 2",
         ImmutableList.of(
@@ -700,7 +717,7 @@ public void testSelectStarWithLimitTimeDescending() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{T("2001-01-03"), 1L, "abc", "", 6f, 6d, HLLCV1.class.getName()},
+            new Object[]{T("2001-01-03"), 1L, "abc", nullValue, 6f, 6d, HLLCV1.class.getName()},
             new Object[]{T("2001-01-02"), 1L, "def", "abc", 5f, 5d, HLLCV1.class.getName()}
         )
     );
@@ -709,6 +726,7 @@ public void testSelectStarWithLimitTimeDescending() throws Exception
   @Test
   public void testSelectStarWithoutLimitTimeAscending() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT * FROM druid.foo ORDER BY __time",
         ImmutableList.of(
@@ -741,11 +759,11 @@ public void testSelectStarWithoutLimitTimeAscending() throws Exception
         ),
         ImmutableList.of(
             new Object[]{T("2000-01-01"), 1L, "", "a", 1f, 1.0, HLLCV1.class.getName()},
-            new Object[]{T("2000-01-02"), 1L, "10.1", "", 2f, 2.0, HLLCV1.class.getName()},
+            new Object[]{T("2000-01-02"), 1L, "10.1", nullValue, 2f, 2.0, HLLCV1.class.getName()},
             new Object[]{T("2000-01-03"), 1L, "2", "", 3f, 3.0, HLLCV1.class.getName()},
             new Object[]{T("2001-01-01"), 1L, "1", "a", 4f, 4.0, HLLCV1.class.getName()},
             new Object[]{T("2001-01-02"), 1L, "def", "abc", 5f, 5.0, HLLCV1.class.getName()},
-            new Object[]{T("2001-01-03"), 1L, "abc", "", 6f, 6.0, HLLCV1.class.getName()}
+            new Object[]{T("2001-01-03"), 1L, "abc", nullValue, 6f, 6.0, HLLCV1.class.getName()}
         )
     );
   }
@@ -753,6 +771,7 @@ public void testSelectStarWithoutLimitTimeAscending() throws Exception
   @Test
   public void testSelectSingleColumnTwice() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT dim2 x, dim2 y FROM druid.foo LIMIT 2",
         ImmutableList.of(
@@ -767,7 +786,7 @@ public void testSelectSingleColumnTwice() throws Exception
         ),
         ImmutableList.of(
             new Object[]{"a", "a"},
-            new Object[]{"", ""}
+            new Object[]{nullValue, nullValue}
         )
     );
   }
@@ -873,9 +892,12 @@ public void testSelfJoinWithFallback() throws Exception
   @Test
   public void testExplainSelfJoinWithFallback() throws Exception
   {
+    String emptyStringEq = NullHandling.replaceWithDefault() ? null : "\"\"";
     final String explanation =
         "BindableJoin(condition=[=($0, $2)], joinType=[inner])\n"
-        + "  DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"columns\":[\"dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING}])\n"
+        + "  DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":"
+        + emptyStringEq
+        + ",\"extractionFn\":null}},\"columns\":[\"dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING}])\n"
         + "  DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"dim1\",\"dim2\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING, dim2:STRING}])\n";
 
     testQuery(
@@ -1222,9 +1244,14 @@ public void testHavingOnApproximateCountDistinct() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 3L},
             new Object[]{"a", 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, 2L},
+            new Object[]{"a", 2L}
         )
     );
   }
@@ -1274,9 +1301,14 @@ public void testHavingOnExactCountDistinct() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 3L},
             new Object[]{"a", 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, 2L},
+            new Object[]{"a", 2L}
         )
     );
   }
@@ -1341,9 +1373,13 @@ public void testColumnComparison() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 1.0f, 1L},
             new Object[]{"2", 3.0f, 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"2", 3.0f, 1L}
         )
     );
   }
@@ -1392,6 +1428,7 @@ public void testHavingOnRatio() throws Exception
   @Test
   public void testGroupByWithSelectProjections() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT\n"
         + "  dim1,"
@@ -1411,10 +1448,10 @@ public void testGroupByWithSelectProjections() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", ""},
-            new Object[]{"1", ""},
+            new Object[]{"", nullValue},
+            new Object[]{"1", nullValue},
             new Object[]{"10.1", "0.1"},
-            new Object[]{"2", ""},
+            new Object[]{"2", nullValue},
             new Object[]{"abc", "bc"},
             new Object[]{"def", "ef"}
         )
@@ -1424,6 +1461,7 @@ public void testGroupByWithSelectProjections() throws Exception
   @Test
   public void testGroupByWithSelectAndOrderByProjections() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT\n"
         + "  dim1,"
@@ -1463,9 +1501,9 @@ public void testGroupByWithSelectAndOrderByProjections() throws Exception
             new Object[]{"10.1", "0.1"},
             new Object[]{"abc", "bc"},
             new Object[]{"def", "ef"},
-            new Object[]{"1", ""},
-            new Object[]{"2", ""},
-            new Object[]{"", ""}
+            new Object[]{"1", nullValue},
+            new Object[]{"2", nullValue},
+            new Object[]{"", nullValue}
         )
     );
   }
@@ -1473,6 +1511,8 @@ public void testGroupByWithSelectAndOrderByProjections() throws Exception
   @Test
   public void testTopNWithSelectProjections() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
+
     testQuery(
         "SELECT\n"
         + "  dim1,"
@@ -1495,10 +1535,10 @@ public void testTopNWithSelectProjections() throws Exception
                 .build()
         ),
         ImmutableList.of(
-            new Object[]{"", ""},
-            new Object[]{"1", ""},
+            new Object[]{"", nullValue},
+            new Object[]{"1", nullValue},
             new Object[]{"10.1", "0.1"},
-            new Object[]{"2", ""},
+            new Object[]{"2", nullValue},
             new Object[]{"abc", "bc"},
             new Object[]{"def", "ef"}
         )
@@ -1508,6 +1548,8 @@ public void testTopNWithSelectProjections() throws Exception
   @Test
   public void testTopNWithSelectAndOrderByProjections() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
+
     testQuery(
         "SELECT\n"
         + "  dim1,"
@@ -1535,9 +1577,9 @@ public void testTopNWithSelectAndOrderByProjections() throws Exception
             new Object[]{"10.1", "0.1"},
             new Object[]{"abc", "bc"},
             new Object[]{"def", "ef"},
-            new Object[]{"1", ""},
-            new Object[]{"2", ""},
-            new Object[]{"", ""}
+            new Object[]{"1", nullValue},
+            new Object[]{"2", nullValue},
+            new Object[]{"", nullValue}
         )
     );
   }
@@ -1656,7 +1698,7 @@ public void testGroupByCaseWhen() throws Exception
                                 + "'match-cnt',"
                                 + "(timestamp_extract(\"__time\",'DAY','UTC') == 0),"
                                 + "'zero     ',"
-                                + "'')",
+                                + DruidExpression.nullLiteral() + ")",
                                 ValueType.STRING
                             )
                         )
@@ -1666,7 +1708,7 @@ public void testGroupByCaseWhen() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", 2L},
+            new Object[]{NullHandling.defaultStringValue(), 2L},
             new Object[]{"match-cnt", 1L},
             new Object[]{"match-m1 ", 3L}
         )
@@ -1690,7 +1732,7 @@ public void testGroupByCaseWhenOfTripleAnd() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "case_searched(((\"m1\" > 1) && (\"m1\" < 5) && (\"cnt\" == 1)),'x','')",
+                                "case_searched(((\"m1\" > 1) && (\"m1\" < 5) && (\"cnt\" == 1)),'x',null)",
                                 ValueType.STRING
                             )
                         )
@@ -1700,7 +1742,7 @@ public void testGroupByCaseWhenOfTripleAnd() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", 3L},
+            new Object[]{NullHandling.defaultStringValue(), 3L},
             new Object[]{"x", 3L}
         )
     );
@@ -1709,35 +1751,85 @@ public void testGroupByCaseWhenOfTripleAnd() throws Exception
   @Test
   public void testNullEmptyStringEquality() throws Exception
   {
-    // Doesn't conform to the SQL standard, but it's how we do it.
-    // This example is used in the sql.md doc.
+    testQuery(
+        "SELECT COUNT(*)\n"
+        + "FROM druid.foo\n"
+        + "WHERE NULLIF(dim2, 'a') IS NULL",
+        ImmutableList.of(
+            Druids.newTimeseriesQueryBuilder()
+                  .dataSource(CalciteTests.DATASOURCE1)
+                  .intervals(QSS(Filtration.eternity()))
+                  .granularity(Granularities.ALL)
+                  .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),1,isnull(\"dim2\"))"))
+                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
+                  .context(TIMESERIES_CONTEXT_DEFAULT)
+                  .build()
+        ),
+        ImmutableList.of(
+            NullHandling.replaceWithDefault() ?
+            // Matches everything but "abc"
+            new Object[]{5L} :
+            // match only null values
+            new Object[]{4L}
+        )
+    );
+  }
+
+  @Test
+  public void testEmptyStringEquality() throws Exception
+  {
+    testQuery(
+        "SELECT COUNT(*)\n"
+        + "FROM druid.foo\n"
+        + "WHERE NULLIF(dim2, 'a') = ''",
+        ImmutableList.of(
+            Druids.newTimeseriesQueryBuilder()
+                  .dataSource(CalciteTests.DATASOURCE1)
+                  .intervals(QSS(Filtration.eternity()))
+                  .granularity(Granularities.ALL)
+                  .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),"
+                                             + (NullHandling.replaceWithDefault() ? "1" : "0")
+                                             + ",(\"dim2\" == ''))"))
+                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
+                  .context(TIMESERIES_CONTEXT_DEFAULT)
+                  .build()
+        ),
+        ImmutableList.of(
+            NullHandling.replaceWithDefault() ?
+            // Matches everything but "abc"
+            new Object[]{5L} :
+            // match only empty string
+            new Object[]{1L}
+        )
+    );
+  }
+
+  @Test
+  public void testNullStringEquality() throws Exception
+  {
+    testQuery(
+        "SELECT COUNT(*)\n"
+        + "FROM druid.foo\n"
+        + "WHERE NULLIF(dim2, 'a') = null",
+        ImmutableList.of(
+            Druids.newTimeseriesQueryBuilder()
+                  .dataSource(CalciteTests.DATASOURCE1)
+                  .intervals(QSS(Filtration.eternity()))
+                  .granularity(Granularities.ALL)
+                  .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),"
+                                             + (NullHandling.replaceWithDefault() ? "1" : "0")
+                                             + ",(\"dim2\" == null))"))
+                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
+                  .context(TIMESERIES_CONTEXT_DEFAULT)
+                  .build()
+        ),
+        NullHandling.replaceWithDefault() ?
+        // Matches everything but "abc"
+        ImmutableList.of(new Object[]{5L}) :
+        // null is not equal to null or any other value
+        ImmutableList.of()
+    );
 
-    final ImmutableList<String> wheres = ImmutableList.of(
-        "NULLIF(dim2, 'a') = ''",
-        "NULLIF(dim2, 'a') IS NULL"
-    );
-
-    for (String where : wheres) {
-      testQuery(
-          "SELECT COUNT(*)\n"
-          + "FROM druid.foo\n"
-          + "WHERE " + where,
-          ImmutableList.of(
-              Druids.newTimeseriesQueryBuilder()
-                    .dataSource(CalciteTests.DATASOURCE1)
-                    .intervals(QSS(Filtration.eternity()))
-                    .granularity(Granularities.ALL)
-                    .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),1,(\"dim2\" == ''))"))
-                    .aggregators(AGGS(new CountAggregatorFactory("a0")))
-                    .context(TIMESERIES_CONTEXT_DEFAULT)
-                    .build()
-          ),
-          ImmutableList.of(
-              // Matches everything but "abc"
-              new Object[]{5L}
-          )
-      );
-    }
   }
 
   @Test
@@ -1756,7 +1848,7 @@ public void testCoalesceColumns() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "case_searched((\"dim2\" != ''),\"dim2\",\"dim1\")",
+                                "case_searched(notnull(\"dim2\"),\"dim2\",\"dim1\")",
                                 ValueType.STRING
                             )
                         )
@@ -1765,11 +1857,18 @@ public void testCoalesceColumns() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"10.1", 1L},
             new Object[]{"2", 1L},
             new Object[]{"a", 2L},
             new Object[]{"abc", 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"", 1L},
+            new Object[]{"10.1", 1L},
+            new Object[]{"a", 2L},
+            new Object[]{"abc", 2L}
         )
     );
   }
@@ -1793,7 +1892,7 @@ public void testColumnIsNull() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{3L}
+            new Object[]{NullHandling.replaceWithDefault() ? 3L : 2L}
         )
     );
   }
@@ -2014,14 +2113,18 @@ public void testCountNullableColumn() throws Exception
                   .aggregators(AGGS(
                       new FilteredAggregatorFactory(
                           new CountAggregatorFactory("a0"),
-                          NOT(SELECTOR("dim2", "", null))
+                          NOT(SELECTOR("dim2", null, null))
                       )
                   ))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{3L}
+        ) :
+        ImmutableList.of(
+            new Object[]{4L}
         )
     );
   }
@@ -2040,7 +2143,9 @@ public void testCountNullableExpression() throws Exception
                       new FilteredAggregatorFactory(
                           new CountAggregatorFactory("a0"),
                           EXPRESSION_FILTER(
-                              "(case_searched((\"dim2\" == 'abc'),'yes',(\"dim2\" == 'def'),'yes','') != '')"
+                              "notnull(case_searched((\"dim2\" == 'abc'),'yes',(\"dim2\" == 'def'),'yes',"
+                              + DruidExpression.nullLiteral()
+                              + "))"
                           )
                       )
                   ))
@@ -2333,7 +2438,7 @@ public void testFilterOnStringAsNumber() throws Exception
   public void testSimpleAggregations() throws Exception
   {
     testQuery(
-        "SELECT COUNT(*), COUNT(cnt), COUNT(dim1), AVG(cnt), SUM(cnt), SUM(cnt) + MIN(cnt) + MAX(cnt) FROM druid.foo",
+        "SELECT COUNT(*), COUNT(cnt), COUNT(dim1), AVG(cnt), SUM(cnt), SUM(cnt) + MIN(cnt) + MAX(cnt), COUNT(dim2) FROM druid.foo",
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
@@ -2344,13 +2449,17 @@ public void testSimpleAggregations() throws Exception
                           new CountAggregatorFactory("a0"),
                           new FilteredAggregatorFactory(
                               new CountAggregatorFactory("a1"),
-                              NOT(SELECTOR("dim1", "", null))
+                              NOT(SELECTOR("dim1", null, null))
                           ),
                           new LongSumAggregatorFactory("a2:sum", "cnt"),
                           new CountAggregatorFactory("a2:count"),
                           new LongSumAggregatorFactory("a3", "cnt"),
                           new LongMinAggregatorFactory("a4", "cnt"),
-                          new LongMaxAggregatorFactory("a5", "cnt")
+                          new LongMaxAggregatorFactory("a5", "cnt"),
+                          new FilteredAggregatorFactory(
+                              new CountAggregatorFactory("a6"),
+                              NOT(SELECTOR("dim2", null, null))
+                          )
                       )
                   )
                   .postAggregators(
@@ -2367,8 +2476,12 @@ public void testSimpleAggregations() throws Exception
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
-            new Object[]{6L, 6L, 5L, 1L, 6L, 8L}
+            new Object[]{6L, 6L, 5L, 1L, 6L, 8L, 3L}
+        ) :
+        ImmutableList.of(
+            new Object[]{6L, 6L, 6L, 1L, 6L, 8L, 4L}
         )
     );
   }
@@ -2533,7 +2646,7 @@ public void testFilteredAggregations() throws Exception
                       new FilteredAggregatorFactory(
                           new CountAggregatorFactory("a3"),
                           AND(
-                              NOT(SELECTOR("dim2", "", null)),
+                              NOT(SELECTOR("dim2", null, null)),
                               NOT(SELECTOR("dim1", "1", null))
                           )
                       ),
@@ -2578,8 +2691,12 @@ public void testFilteredAggregations() throws Exception
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{1L, 5L, 1L, 2L, 5L, 5L, 2L, 1L, 5L, 1L, 5L}
+        ) :
+        ImmutableList.of(
+            new Object[]{1L, 5L, 1L, 3L, 5L, 5L, 2L, 1L, 5L, 1L, 5L}
         )
     );
   }
@@ -2647,8 +2764,12 @@ public void testFilteredAggregationWithNotIn() throws Exception
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{5L, 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{5L, 3L}
         )
     );
   }
@@ -2828,10 +2949,16 @@ public void testExpressionFilteringAndGroupingOnStringCastToNumber() throws Exce
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{10.0f, 1L},
             new Object[]{2.0f, 1L},
             new Object[]{0.0f, 4L}
+        ) :
+        ImmutableList.of(
+            new Object[]{10.0f, 1L},
+            new Object[]{2.0f, 1L},
+            new Object[]{0.0f, 1L}
         )
     );
   }
@@ -3462,8 +3589,6 @@ public void testCountStarWithTimeFilterOnLongColumnUsingTimestampToMillis() thro
   @Test
   public void testSumOfString() throws Exception
   {
-    // Perhaps should be 13, but dim1 has "1", "2" and "10.1"; and CAST('10.1' AS INTEGER) = 0 since parsing is strict.
-
     testQuery(
         "SELECT SUM(CAST(dim1 AS INTEGER)) FROM druid.foo",
         ImmutableList.of(
@@ -3483,7 +3608,7 @@ public void testSumOfString() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{3L}
+            new Object[]{13L}
         )
     );
   }
@@ -3491,8 +3616,6 @@ public void testSumOfString() throws Exception
   @Test
   public void testSumOfExtractionFn() throws Exception
   {
-    // Perhaps should be 13, but dim1 has "1", "2" and "10.1"; and CAST('10.1' AS INTEGER) = 0 since parsing is strict.
-
     testQuery(
         "SELECT SUM(CAST(SUBSTRING(dim1, 1, 10) AS INTEGER)) FROM druid.foo",
         ImmutableList.of(
@@ -3512,7 +3635,7 @@ public void testSumOfExtractionFn() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{3L}
+            new Object[]{13L}
         )
     );
   }
@@ -3537,7 +3660,7 @@ public void testTimeseriesWithTimeFilterOnLongColumnUsingMillisToTimestamp() thr
                 .setInterval(QSS(Filtration.eternity()))
                 .setGranularity(Granularities.ALL)
                 .setVirtualColumns(
-                    EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"cnt\",'P1Y','','UTC')", ValueType.LONG)
+                    EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"cnt\",'P1Y',null,'UTC')", ValueType.LONG)
                 )
                 .setDimFilter(
                     BOUND(
@@ -3642,10 +3765,17 @@ public void testSelectDistinctWithLimit() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{""},
             new Object[]{"a"},
             new Object[]{"abc"}
+        ) :
+        ImmutableList.of(
+            new Object[]{null},
+            new Object[]{""},
+            new Object[]{"a"},
+            new Object[]{"abc"}
         )
     );
   }
@@ -3666,10 +3796,17 @@ public void testSelectDistinctWithSortAsOuterQuery() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{""},
             new Object[]{"a"},
             new Object[]{"abc"}
+        ) :
+        ImmutableList.of(
+            new Object[]{null},
+            new Object[]{""},
+            new Object[]{"a"},
+            new Object[]{"abc"}
         )
     );
   }
@@ -3690,7 +3827,14 @@ public void testSelectDistinctWithSortAsOuterQuery2() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
+        ImmutableList.of(
+            new Object[]{""},
+            new Object[]{"a"},
+            new Object[]{"abc"}
+        ) :
         ImmutableList.of(
+            new Object[]{null},
             new Object[]{""},
             new Object[]{"a"},
             new Object[]{"abc"}
@@ -3726,10 +3870,17 @@ public void testSelectDistinctWithSortAsOuterQuery4() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{""},
             new Object[]{"abc"},
             new Object[]{"a"}
+        ) :
+        ImmutableList.of(
+            new Object[]{null},
+            new Object[]{"abc"},
+            new Object[]{"a"},
+            new Object[]{""}
         )
     );
   }
@@ -3844,14 +3995,14 @@ public void testExactCountDistinct() throws Exception
                         .setAggregatorSpecs(AGGS(
                             new FilteredAggregatorFactory(
                                 new CountAggregatorFactory("a0"),
-                                NOT(SELECTOR("d0", "", null))
+                                NOT(SELECTOR("d0", null, null))
                             )
                         ))
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{2L}
+            new Object[]{NullHandling.replaceWithDefault() ? 2L : 3L}
         )
     );
   }
@@ -3923,16 +4074,23 @@ public void testExactCountDistinctWithGroupingAndOtherAggregators() throws Excep
                             new LongSumAggregatorFactory("_a0", "a0"),
                             new FilteredAggregatorFactory(
                                 new CountAggregatorFactory("_a1"),
-                                NOT(SELECTOR("d0", "", null))
+                                NOT(SELECTOR("d0", null, null))
                             )
                         ))
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 3L, 3L},
             new Object[]{"a", 2L, 1L},
             new Object[]{"abc", 1L, 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, 2L, 2L},
+            new Object[]{"", 1L, 1L},
+            new Object[]{"a", 2L, 2L},
+            new Object[]{"abc", 1L, 1L}
         )
     );
   }
@@ -4004,8 +4162,12 @@ public void testApproxCountDistinct() throws Exception
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{6L, 3L, 2L, 2L, 2L, 6L}
+        ) :
+        ImmutableList.of(
+            new Object[]{6L, 3L, 2L, 1L, 1L, 6L}
         )
     );
   }
@@ -4050,7 +4212,7 @@ public void testNestedGroupBy() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "_d0:v",
-                                "timestamp_floor(\"a0\",'PT1H','','UTC')",
+                                "timestamp_floor(\"a0\",'PT1H',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -4118,8 +4280,12 @@ public void testDoubleNestedGroupBy() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{6L, 3L}
+        ) :
+        ImmutableList.of(
+            new Object[]{6L, 4L}
         )
     );
   }
@@ -4184,8 +4350,12 @@ public void testExactCountDistinctUsingSubquery() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{6L, 3L}
+        ) :
+        ImmutableList.of(
+            new Object[]{6L, 4L}
         )
     );
   }
@@ -4193,6 +4363,9 @@ public void testExactCountDistinctUsingSubquery() throws Exception
   @Test
   public void testTopNFilterJoin() throws Exception
   {
+    DimFilter filter = NullHandling.replaceWithDefault() ?
+                       IN("dim2", Arrays.asList(null, "a"), null)
+                                                         : SELECTOR("dim2", "a", null);
     // Filters on top N values of some dimension by using an inner join.
     testQuery(
         "SELECT t1.dim1, SUM(t1.cnt)\n"
@@ -4223,7 +4396,7 @@ public void testTopNFilterJoin() throws Exception
                         .setDataSource(CalciteTests.DATASOURCE1)
                         .setInterval(QSS(Filtration.eternity()))
                         .setGranularity(Granularities.ALL)
-                        .setDimFilter(IN("dim2", ImmutableList.of("", "a"), null))
+                        .setDimFilter(filter)
                         .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
                         .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
                         .setLimitSpec(
@@ -4241,12 +4414,17 @@ public void testTopNFilterJoin() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 1L},
             new Object[]{"1", 1L},
             new Object[]{"10.1", 1L},
             new Object[]{"2", 1L},
             new Object[]{"abc", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"", 1L},
+            new Object[]{"1", 1L}
         )
     );
   }
@@ -4427,7 +4605,7 @@ public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception
     final String explanation =
         "DruidOuterQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"limit\":2147483647,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"skipEmptyBuckets\":true,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"}}], signature=[{a0:LONG}])\n"
         + "  DruidSemiJoin(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}], leftExpressions=[[SUBSTRING($3, 1, 1)]], rightKeys=[[0]])\n"
-        + "    DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}], signature=[{d0:STRING}])\n";
+        + "    DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":null,\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}], signature=[{d0:STRING}])\n";
 
     testQuery(
         "EXPLAIN PLAN FOR SELECT COUNT(*)\n"
@@ -4435,7 +4613,7 @@ public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception
         + "  SELECT DISTINCT dim2\n"
         + "  FROM druid.foo\n"
         + "  WHERE SUBSTRING(dim2, 1, 1) IN (\n"
-        + "    SELECT SUBSTRING(dim1, 1, 1) FROM druid.foo WHERE dim1 <> ''\n"
+        + "    SELECT SUBSTRING(dim1, 1, 1) FROM druid.foo WHERE dim1 IS NOT NULL\n"
         + "  )\n"
         + ")",
         ImmutableList.of(),
@@ -4476,8 +4654,51 @@ public void testExactCountDistinctUsingSubqueryWithWherePushDown() throws Except
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{3L, 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{5L, 3L}
+        )
+    );
+
+    testQuery(
+        "SELECT\n"
+        + "  SUM(cnt),\n"
+        + "  COUNT(*)\n"
+        + "FROM (SELECT dim2, SUM(cnt) AS cnt FROM druid.foo GROUP BY dim2)\n"
+        + "WHERE dim2 IS NOT NULL",
+        ImmutableList.of(
+            GroupByQuery.builder()
+                        .setDataSource(
+                            new QueryDataSource(
+                                GroupByQuery.builder()
+                                            .setDataSource(CalciteTests.DATASOURCE1)
+                                            .setInterval(QSS(Filtration.eternity()))
+                                            .setDimFilter(NOT(SELECTOR("dim2", null, null)))
+                                            .setGranularity(Granularities.ALL)
+                                            .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0")))
+                                            .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
+                                            .setContext(QUERY_CONTEXT_DEFAULT)
+                                            .build()
+                            )
+                        )
+                        .setInterval(QSS(Filtration.eternity()))
+                        .setGranularity(Granularities.ALL)
+                        .setAggregatorSpecs(AGGS(
+                            new LongSumAggregatorFactory("_a0", "a0"),
+                            new CountAggregatorFactory("_a1")
+                        ))
+                        .setContext(QUERY_CONTEXT_DEFAULT)
+                        .build()
+        ),
+        NullHandling.replaceWithDefault() ?
+        ImmutableList.of(
+            new Object[]{3L, 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{4L, 3L}
         )
     );
   }
@@ -4516,8 +4737,12 @@ public void testExactCountDistinctUsingSubqueryWithWhereToOuterFilter() throws E
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{3L, 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{2L, 1L}
         )
     );
   }
@@ -4603,10 +4828,15 @@ public void testHistogramUsingSubquery() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"1", 1L},
             new Object[]{"2", 1L},
             new Object[]{"3", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"1", 2L},
+            new Object[]{"2", 2L}
         )
     );
   }
@@ -4653,9 +4883,14 @@ public void testHistogramUsingSubqueryWithSort() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"1", 1L},
             new Object[]{"2", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"1", 2L},
+            new Object[]{"2", 2L}
         )
     );
   }
@@ -4809,6 +5044,7 @@ public void testSillyQuarters() throws Exception
   @Test
   public void testRegexpExtract() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT DISTINCT\n"
         + "  REGEXP_EXTRACT(dim1, '^.'),\n"
@@ -4845,7 +5081,7 @@ public void testRegexpExtract() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", ""},
+            new Object[]{nullValue, nullValue},
             new Object[]{"1", "1"},
             new Object[]{"2", "2"},
             new Object[]{"a", "a"},
@@ -4857,6 +5093,7 @@ public void testRegexpExtract() throws Exception
   @Test
   public void testGroupBySortPushDown() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT dim2, dim1, SUM(cnt) FROM druid.foo GROUP BY dim2, dim1 ORDER BY dim1 LIMIT 4",
         ImmutableList.of(
@@ -4889,7 +5126,7 @@ public void testGroupBySortPushDown() throws Exception
         ImmutableList.of(
             new Object[]{"a", "", 1L},
             new Object[]{"a", "1", 1L},
-            new Object[]{"", "10.1", 1L},
+            new Object[]{nullValue, "10.1", 1L},
             new Object[]{"", "2", 1L}
         )
     );
@@ -4898,6 +5135,7 @@ public void testGroupBySortPushDown() throws Exception
   @Test
   public void testGroupByLimitPushDownWithHavingOnLong() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT dim1, dim2, SUM(cnt) AS thecnt "
         + "FROM druid.foo "
@@ -4933,11 +5171,18 @@ public void testGroupByLimitPushDownWithHavingOnLong() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"10.1", "", 1L},
             new Object[]{"2", "", 1L},
             new Object[]{"abc", "", 1L},
             new Object[]{"", "a", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"10.1", null, 1L},
+            new Object[]{"abc", null, 1L},
+            new Object[]{"2", "", 1L},
+            new Object[]{"", "a", 1L}
         )
     );
   }
@@ -5242,7 +5487,7 @@ public void testGroupByFloor() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{0.0f, 3L},
+            new Object[]{NullHandling.defaultFloatValue(), 3L},
             new Object[]{1.0f, 1L},
             new Object[]{2.0f, 1L},
             new Object[]{10.0f, 1L}
@@ -5296,7 +5541,7 @@ public void testGroupByFloorWithOrderBy() throws Exception
             new Object[]{10.0f, 1L},
             new Object[]{2.0f, 1L},
             new Object[]{1.0f, 1L},
-            new Object[]{0.0f, 3L}
+            new Object[]{NullHandling.defaultFloatValue(), 3L}
         )
     );
   }
@@ -5317,7 +5562,7 @@ public void testGroupByFloorTimeAndOneOtherDimensionWithOrderBy() throws Excepti
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_floor(\"__time\",'P1Y','','UTC')",
+                                "timestamp_floor(\"__time\",'P1Y',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -5357,12 +5602,21 @@ public void testGroupByFloorTimeAndOneOtherDimensionWithOrderBy() throws Excepti
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{T("2000"), "", 2L},
             new Object[]{T("2000"), "a", 1L},
             new Object[]{T("2001"), "", 1L},
             new Object[]{T("2001"), "a", 1L},
             new Object[]{T("2001"), "abc", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{T("2000"), null, 1L},
+            new Object[]{T("2000"), "", 1L},
+            new Object[]{T("2000"), "a", 1L},
+            new Object[]{T("2001"), null, 1L},
+            new Object[]{T("2001"), "a", 1L},
+            new Object[]{T("2001"), "abc", 1L}
         )
     );
   }
@@ -5395,6 +5649,7 @@ public void testGroupByStringLength() throws Exception
   @Test
   public void testFilterAndGroupByLookup() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     final RegisteredLookupExtractionFn extractionFn = new RegisteredLookupExtractionFn(
         null,
         "lookyloo",
@@ -5439,7 +5694,7 @@ public void testFilterAndGroupByLookup() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", 5L},
+            new Object[]{nullValue, 5L},
             new Object[]{"xabc", 1L}
         )
     );
@@ -5477,7 +5732,7 @@ public void testCountDistinctOfLookup() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{2L}
+            new Object[]{NullHandling.replaceWithDefault() ? 2L : 1L}
         )
     );
   }
@@ -5667,7 +5922,7 @@ public void testTimeseriesUsingTimeFloorWithTimeShift() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_floor(timestamp_shift(\"__time\",'P1D',-1),'P1M','','UTC')",
+                                "timestamp_floor(timestamp_shift(\"__time\",'P1D',-1),'P1M',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -5715,7 +5970,7 @@ public void testTimeseriesUsingTimeFloorWithTimestampAdd() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_floor((\"__time\" + -86400000),'P1M','','UTC')",
+                                "timestamp_floor((\"__time\" + -86400000),'P1M',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -5842,7 +6097,7 @@ public void testTimeseriesLosAngelesUsingTimeFloorConnectionLosAngeles() throws
   public void testTimeseriesDontSkipEmptyBuckets() throws Exception
   {
     // Tests that query context parameters are passed through to the underlying query engine.
-
+    Long defaultVal = NullHandling.replaceWithDefault() ? 0L : null;
     testQuery(
         PLANNER_CONFIG_DEFAULT,
         QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS,
@@ -5864,29 +6119,29 @@ public void testTimeseriesDontSkipEmptyBuckets() throws Exception
         ),
         ImmutableList.<Object[]>builder()
             .add(new Object[]{1L, T("2000-01-01")})
-            .add(new Object[]{0L, T("2000-01-01T01")})
-            .add(new Object[]{0L, T("2000-01-01T02")})
-            .add(new Object[]{0L, T("2000-01-01T03")})
-            .add(new Object[]{0L, T("2000-01-01T04")})
-            .add(new Object[]{0L, T("2000-01-01T05")})
-            .add(new Object[]{0L, T("2000-01-01T06")})
-            .add(new Object[]{0L, T("2000-01-01T07")})
-            .add(new Object[]{0L, T("2000-01-01T08")})
-            .add(new Object[]{0L, T("2000-01-01T09")})
-            .add(new Object[]{0L, T("2000-01-01T10")})
-            .add(new Object[]{0L, T("2000-01-01T11")})
-            .add(new Object[]{0L, T("2000-01-01T12")})
-            .add(new Object[]{0L, T("2000-01-01T13")})
-            .add(new Object[]{0L, T("2000-01-01T14")})
-            .add(new Object[]{0L, T("2000-01-01T15")})
-            .add(new Object[]{0L, T("2000-01-01T16")})
-            .add(new Object[]{0L, T("2000-01-01T17")})
-            .add(new Object[]{0L, T("2000-01-01T18")})
-            .add(new Object[]{0L, T("2000-01-01T19")})
-            .add(new Object[]{0L, T("2000-01-01T20")})
-            .add(new Object[]{0L, T("2000-01-01T21")})
-            .add(new Object[]{0L, T("2000-01-01T22")})
-            .add(new Object[]{0L, T("2000-01-01T23")})
+            .add(new Object[]{defaultVal, T("2000-01-01T01")})
+            .add(new Object[]{defaultVal, T("2000-01-01T02")})
+            .add(new Object[]{defaultVal, T("2000-01-01T03")})
+            .add(new Object[]{defaultVal, T("2000-01-01T04")})
+            .add(new Object[]{defaultVal, T("2000-01-01T05")})
+            .add(new Object[]{defaultVal, T("2000-01-01T06")})
+            .add(new Object[]{defaultVal, T("2000-01-01T07")})
+            .add(new Object[]{defaultVal, T("2000-01-01T08")})
+            .add(new Object[]{defaultVal, T("2000-01-01T09")})
+            .add(new Object[]{defaultVal, T("2000-01-01T10")})
+            .add(new Object[]{defaultVal, T("2000-01-01T11")})
+            .add(new Object[]{defaultVal, T("2000-01-01T12")})
+            .add(new Object[]{defaultVal, T("2000-01-01T13")})
+            .add(new Object[]{defaultVal, T("2000-01-01T14")})
+            .add(new Object[]{defaultVal, T("2000-01-01T15")})
+            .add(new Object[]{defaultVal, T("2000-01-01T16")})
+            .add(new Object[]{defaultVal, T("2000-01-01T17")})
+            .add(new Object[]{defaultVal, T("2000-01-01T18")})
+            .add(new Object[]{defaultVal, T("2000-01-01T19")})
+            .add(new Object[]{defaultVal, T("2000-01-01T20")})
+            .add(new Object[]{defaultVal, T("2000-01-01T21")})
+            .add(new Object[]{defaultVal, T("2000-01-01T22")})
+            .add(new Object[]{defaultVal, T("2000-01-01T23")})
             .build()
     );
   }
@@ -6082,7 +6337,7 @@ public void testGroupByExtractFloorTime() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_extract(timestamp_floor(\"__time\",'P1Y','','UTC'),'YEAR','UTC')",
+                                "timestamp_extract(timestamp_floor(\"__time\",'P1Y',null,'UTC'),'YEAR','UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -6117,7 +6372,7 @@ public void testGroupByExtractFloorTimeLosAngeles() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_extract(timestamp_floor(\"__time\",'P1Y','','America/Los_Angeles'),'YEAR','America/Los_Angeles')",
+                                "timestamp_extract(timestamp_floor(\"__time\",'P1Y',null,'America/Los_Angeles'),'YEAR','America/Los_Angeles')",
                                 ValueType.LONG
                             )
                         )
@@ -6235,7 +6490,7 @@ public void testGroupByTimeAndOtherDimension() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d1:v",
-                                "timestamp_floor(\"__time\",'P1M','','UTC')",
+                                "timestamp_floor(\"__time\",'P1M',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -6262,12 +6517,21 @@ public void testGroupByTimeAndOtherDimension() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", T("2000-01-01"), 2L},
             new Object[]{"", T("2001-01-01"), 1L},
             new Object[]{"a", T("2000-01-01"), 1L},
             new Object[]{"a", T("2001-01-01"), 1L},
             new Object[]{"abc", T("2001-01-01"), 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, T("2000-01-01"), 1L},
+            new Object[]{null, T("2001-01-01"), 1L},
+            new Object[]{"", T("2000-01-01"), 1L},
+            new Object[]{"a", T("2000-01-01"), 1L},
+            new Object[]{"a", T("2001-01-01"), 1L},
+            new Object[]{"abc", T("2001-01-01"), 1L}
         )
     );
   }
@@ -6397,7 +6661,13 @@ public void testUsingSubqueryAsFilterOnTwoColumns() throws Exception
             newScanQueryBuilder()
                 .dataSource(CalciteTests.DATASOURCE1)
                 .intervals(QSS(Filtration.eternity()))
-                .filters(AND(SELECTOR("dim1", "def", null), SELECTOR("dim2", "abc", null)))
+                .filters(OR(
+                    SELECTOR("dim1", "def", null),
+                    AND(
+                        SELECTOR("dim1", "def", null),
+                        SELECTOR("dim2", "abc", null)
+                    )
+                ))
                 .columns("__time", "cnt", "dim1", "dim2")
                 .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST)
                 .context(QUERY_CONTEXT_DEFAULT)
@@ -6412,8 +6682,9 @@ public void testUsingSubqueryAsFilterOnTwoColumns() throws Exception
   @Test
   public void testUsingSubqueryAsFilterWithInnerSort() throws Exception
   {
-    // Regression test for https://github.com/druid-io/druid/issues/4208
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
 
+    // Regression test for https://github.com/druid-io/druid/issues/4208
     testQuery(
         "SELECT dim1, dim2 FROM druid.foo\n"
         + " WHERE dim2 IN (\n"
@@ -6450,13 +6721,20 @@ public void testUsingSubqueryAsFilterWithInnerSort() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", "a"},
-            new Object[]{"10.1", ""},
+            new Object[]{"10.1", nullValue},
             new Object[]{"2", ""},
             new Object[]{"1", "a"},
             new Object[]{"def", "abc"},
-            new Object[]{"abc", ""}
+            new Object[]{"abc", nullValue}
+        ) :
+        ImmutableList.of(
+            new Object[]{"", "a"},
+            new Object[]{"2", ""},
+            new Object[]{"1", "a"},
+            new Object[]{"def", "abc"}
         )
     );
   }
@@ -6610,9 +6888,9 @@ public void testProjectAfterSort2() throws Exception
         ImmutableList.of(
             new Object[]{1.0, "", "a", 1.0},
             new Object[]{4.0, "1", "a", 4.0},
-            new Object[]{2.0, "10.1", "", 2.0},
+            new Object[]{2.0, "10.1", NullHandling.defaultStringValue(), 2.0},
             new Object[]{3.0, "2", "", 3.0},
-            new Object[]{6.0, "abc", "", 6.0},
+            new Object[]{6.0, "abc", NullHandling.defaultStringValue(), 6.0},
             new Object[]{5.0, "def", "abc", 5.0}
         )
     );
@@ -6863,11 +7141,11 @@ public void testConcat() throws Exception
         ),
         ImmutableList.of(
             new Object[]{"ax1.09999"},
-            new Object[]{"10.1x2.0999910.1"},
+            new Object[]{NullHandling.sqlCompatible() ? null : "10.1x2.0999910.1"}, // dim2 is null
             new Object[]{"2x3.099992"},
             new Object[]{"1ax4.099991"},
             new Object[]{"defabcx5.09999def"},
-            new Object[]{"abcx6.09999abc"}
+            new Object[]{NullHandling.sqlCompatible() ? null : "abcx6.09999abc"} // dim2 is null
         )
     );
   }
diff --git a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
index 153aeda2f48..546c967f69c 100644
--- a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.math.expr.ExprEval;
 import io.druid.math.expr.Parser;
@@ -192,8 +193,8 @@ public void testStrpos()
             rexBuilder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.VARCHAR)),
             rexBuilder.makeLiteral("ax")
         ),
-        DruidExpression.fromExpression("(strpos('','ax') + 1)"),
-        0L
+        DruidExpression.fromExpression("(strpos(null,'ax') + 1)"),
+        NullHandling.replaceWithDefault() ? 0L : null
     );
   }
 
@@ -326,7 +327,7 @@ public void testDateTrunc()
             rexBuilder.makeLiteral("hour"),
             timestampLiteral(DateTimes.of("2000-02-03T04:05:06Z"))
         ),
-        DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H',null,'UTC')"),
         DateTimes.of("2000-02-03T04:00:00").getMillis()
     );
 
@@ -336,7 +337,7 @@ public void testDateTrunc()
             rexBuilder.makeLiteral("DAY"),
             timestampLiteral(DateTimes.of("2000-02-03T04:05:06Z"))
         ),
-        DruidExpression.fromExpression("timestamp_floor(949550706000,'P1D','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(949550706000,'P1D',null,'UTC')"),
         DateTimes.of("2000-02-03T00:00:00").getMillis()
     );
   }
@@ -387,7 +388,7 @@ public void testTimeFloor()
             timestampLiteral(DateTimes.of("2000-02-03T04:05:06Z")),
             rexBuilder.makeLiteral("PT1H")
         ),
-        DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H',null,'UTC')"),
         DateTimes.of("2000-02-03T04:00:00").getMillis()
     );
 
@@ -399,7 +400,7 @@ public void testTimeFloor()
             rexBuilder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.TIMESTAMP)),
             rexBuilder.makeLiteral("America/Los_Angeles")
         ),
-        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','America/Los_Angeles')"),
+        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D',null,'America/Los_Angeles')"),
         DateTimes.of("2000-02-02T08:00:00").getMillis()
     );
   }
@@ -415,7 +416,7 @@ public void testOtherTimeFloor()
             inputRef("t"),
             rexBuilder.makeFlag(TimeUnitRange.YEAR)
         ),
-        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1Y','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1Y',null,'UTC')"),
         DateTimes.of("2000").getMillis()
     );
   }
@@ -431,7 +432,7 @@ public void testOtherTimeCeil()
             inputRef("t"),
             rexBuilder.makeFlag(TimeUnitRange.YEAR)
         ),
-        DruidExpression.fromExpression("timestamp_ceil(\"t\",'P1Y','','UTC')"),
+        DruidExpression.fromExpression("timestamp_ceil(\"t\",'P1Y',null,'UTC')"),
         DateTimes.of("2001").getMillis()
     );
   }
@@ -666,7 +667,7 @@ public void testCastAsTimestamp()
         ),
         DruidExpression.of(
             null,
-            "timestamp_parse(\"tstr\",'','UTC')"
+            "timestamp_parse(\"tstr\",null,'UTC')"
         ),
         DateTimes.of("2000-02-03T04:05:06Z").getMillis()
     );
@@ -713,7 +714,7 @@ public void testCastAsDate()
             typeFactory.createSqlType(SqlTypeName.DATE),
             inputRef("t")
         ),
-        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D',null,'UTC')"),
         DateTimes.of("2000-02-03").getMillis()
     );
 
@@ -723,7 +724,7 @@ public void testCastAsDate()
             inputRef("dstr")
         ),
         DruidExpression.fromExpression(
-            "timestamp_floor(timestamp_parse(\"dstr\",'','UTC'),'P1D','','UTC')"
+            "timestamp_floor(timestamp_parse(\"dstr\",null,'UTC'),'P1D',null,'UTC')"
         ),
         DateTimes.of("2000-02-03").getMillis()
     );
@@ -741,7 +742,7 @@ public void testCastFromDate()
             )
         ),
         DruidExpression.fromExpression(
-            "timestamp_format(timestamp_floor(\"t\",'P1D','','UTC'),'yyyy-MM-dd','UTC')"
+            "timestamp_format(timestamp_floor(\"t\",'P1D',null,'UTC'),'yyyy-MM-dd','UTC')"
         ),
         "2000-02-03"
     );
@@ -754,7 +755,7 @@ public void testCastFromDate()
                 inputRef("t")
             )
         ),
-        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D',null,'UTC')"),
         DateTimes.of("2000-02-03").getMillis()
     );
   }
diff --git a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java
index 32ed71821a9..61a0f054cfc 100644
--- a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java
@@ -23,6 +23,8 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.ISE;
 import io.druid.java.util.common.Pair;
@@ -215,10 +217,18 @@ public void testFieldAliasingGroupBy() throws Exception
     ).rhs;
 
     Assert.assertEquals(
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             ImmutableMap.of("x", "", "y", ""),
             ImmutableMap.of("x", "a", "y", "a"),
             ImmutableMap.of("x", "abc", "y", "abc")
+        ) :
+        ImmutableList.of(
+            // x and y both should be null instead of empty string
+            Maps.transformValues(ImmutableMap.of("x", "", "y", ""), (val) -> null),
+            ImmutableMap.of("x", "", "y", ""),
+            ImmutableMap.of("x", "a", "y", "a"),
+            ImmutableMap.of("x", "abc", "y", "abc")
         ),
         rows
     );
diff --git a/sql/src/test/java/io/druid/sql/calcite/planner/CalcitesTest.java b/sql/src/test/java/io/druid/sql/calcite/planner/CalcitesTest.java
index 341044466cf..beb503ddaa4 100644
--- a/sql/src/test/java/io/druid/sql/calcite/planner/CalcitesTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/planner/CalcitesTest.java
@@ -29,7 +29,6 @@
   @Test
   public void testEscapeStringLiteral()
   {
-    Assert.assertEquals("''", Calcites.escapeStringLiteral(null));
     Assert.assertEquals("''", Calcites.escapeStringLiteral(""));
     Assert.assertEquals("'foo'", Calcites.escapeStringLiteral("foo"));
     Assert.assertEquals("'foo bar'", Calcites.escapeStringLiteral("foo bar"));
diff --git a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
index 10b9514dcbf..b9e60d9b981 100644
--- a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
+++ b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
@@ -32,8 +32,6 @@
 import com.google.inject.Injector;
 import com.google.inject.Key;
 import com.google.inject.Module;
-import io.druid.java.util.emitter.core.NoopEmitter;
-import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.collections.StupidPool;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
@@ -43,8 +41,9 @@
 import io.druid.data.input.impl.TimestampSpec;
 import io.druid.guice.ExpressionModule;
 import io.druid.guice.annotations.Json;
+import io.druid.java.util.emitter.core.NoopEmitter;
+import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.math.expr.ExprMacroTable;
-import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
 import io.druid.query.DefaultGenericQueryMetricsFactory;
 import io.druid.query.DefaultQueryRunnerFactoryConglomerate;
 import io.druid.query.DruidProcessingConfig;
@@ -92,12 +91,12 @@
 import io.druid.segment.QueryableIndex;
 import io.druid.segment.TestHelper;
 import io.druid.segment.incremental.IncrementalIndexSchema;
+import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
 import io.druid.server.QueryLifecycleFactory;
 import io.druid.server.log.NoopRequestLogger;
 import io.druid.server.security.Access;
 import io.druid.server.security.Action;
 import io.druid.server.security.AllowAllAuthenticator;
-import io.druid.server.security.NoopEscalator;
 import io.druid.server.security.AuthConfig;
 import io.druid.server.security.AuthenticationResult;
 import io.druid.server.security.Authenticator;
@@ -105,6 +104,7 @@
 import io.druid.server.security.Authorizer;
 import io.druid.server.security.AuthorizerMapper;
 import io.druid.server.security.Escalator;
+import io.druid.server.security.NoopEscalator;
 import io.druid.server.security.Resource;
 import io.druid.server.security.ResourceType;
 import io.druid.sql.calcite.expression.SqlOperatorConversion;
@@ -137,7 +137,8 @@
   public static final String FORBIDDEN_DATASOURCE = "forbiddenDatasource";
 
   public static final String TEST_SUPERUSER_NAME = "testSuperuser";
-  public static final AuthorizerMapper TEST_AUTHORIZER_MAPPER = new AuthorizerMapper(null) {
+  public static final AuthorizerMapper TEST_AUTHORIZER_MAPPER = new AuthorizerMapper(null)
+  {
     @Override
     public Authorizer getAuthorizer(String name)
     {
@@ -162,11 +163,13 @@ public Access authorize(
     }
   };
   public static final AuthenticatorMapper TEST_AUTHENTICATOR_MAPPER;
+
   static {
     final Map<String, Authenticator> defaultMap = Maps.newHashMap();
     defaultMap.put(
         AuthConfig.ALLOW_ALL_NAME,
-        new AllowAllAuthenticator() {
+        new AllowAllAuthenticator()
+        {
           @Override
           public AuthenticationResult authenticateJDBCContext(Map<String, Object> context)
           {
@@ -176,9 +179,12 @@ public AuthenticationResult authenticateJDBCContext(Map<String, Object> context)
     );
     TEST_AUTHENTICATOR_MAPPER = new AuthenticatorMapper(defaultMap);
   }
+
   public static final Escalator TEST_AUTHENTICATOR_ESCALATOR;
+
   static {
-    TEST_AUTHENTICATOR_ESCALATOR = new NoopEscalator() {
+    TEST_AUTHENTICATOR_ESCALATOR = new NoopEscalator()
+    {
 
       @Override
       public AuthenticationResult createEscalatedAuthenticationResult()
@@ -215,15 +221,14 @@ public void configure(final Binder binder)
 
           // This Module is just to get a LookupReferencesManager with a usable "lookyloo" lookup.
 
-          binder.bind(LookupReferencesManager.class)
-                .toInstance(
-                    LookupEnabledTestExprMacroTable.createTestLookupReferencesManager(
-                        ImmutableMap.of(
-                            "a", "xa",
-                            "abc", "xabc"
-                        )
-                    )
-            );
+          binder.bind(LookupReferencesManager.class).toInstance(
+              LookupEnabledTestExprMacroTable.createTestLookupReferencesManager(
+                  ImmutableMap.of(
+                      "a", "xa",
+                      "abc", "xabc"
+                  )
+              )
+          );
 
         }
       }


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@druid.apache.org
For additional commands, e-mail: commits-help@druid.apache.org